##// END OF EJS Templates
phabricator: use the `http.timeout` config for conduit call...
marmoute -
r46584:4d70444c default
parent child Browse files
Show More
@@ -1,2368 +1,2377 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that are not easily supported by
34 # if you need to specify advanced options that are not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 # retry failed command N time (default 0). Useful when using the extension
38 # retry failed command N time (default 0). Useful when using the extension
39 # over flaky connection.
39 # over flaky connection.
40 #
40 #
41 # We wait `retry.interval` between each retry, in seconds.
41 # We wait `retry.interval` between each retry, in seconds.
42 # (default 1 second).
42 # (default 1 second).
43 retry = 3
43 retry = 3
44 retry.interval = 10
44 retry.interval = 10
45
45
46 # the retry option can combine well with the http.timeout one.
47 #
48 # For example to give up on http request after 20 seconds:
49 [http]
50 timeout=20
51
46 [auth]
52 [auth]
47 example.schemes = https
53 example.schemes = https
48 example.prefix = phab.example.com
54 example.prefix = phab.example.com
49
55
50 # API token. Get it from https://$HOST/conduit/login/
56 # API token. Get it from https://$HOST/conduit/login/
51 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
52 """
58 """
53
59
54 from __future__ import absolute_import
60 from __future__ import absolute_import
55
61
56 import base64
62 import base64
57 import contextlib
63 import contextlib
58 import hashlib
64 import hashlib
59 import itertools
65 import itertools
60 import json
66 import json
61 import mimetypes
67 import mimetypes
62 import operator
68 import operator
63 import re
69 import re
64 import time
70 import time
65
71
66 from mercurial.node import bin, nullid, short
72 from mercurial.node import bin, nullid, short
67 from mercurial.i18n import _
73 from mercurial.i18n import _
68 from mercurial.pycompat import getattr
74 from mercurial.pycompat import getattr
69 from mercurial.thirdparty import attr
75 from mercurial.thirdparty import attr
70 from mercurial import (
76 from mercurial import (
71 cmdutil,
77 cmdutil,
72 context,
78 context,
73 copies,
79 copies,
74 encoding,
80 encoding,
75 error,
81 error,
76 exthelper,
82 exthelper,
77 graphmod,
83 graphmod,
78 httpconnection as httpconnectionmod,
84 httpconnection as httpconnectionmod,
79 localrepo,
85 localrepo,
80 logcmdutil,
86 logcmdutil,
81 match,
87 match,
82 mdiff,
88 mdiff,
83 obsutil,
89 obsutil,
84 parser,
90 parser,
85 patch,
91 patch,
86 phases,
92 phases,
87 pycompat,
93 pycompat,
88 rewriteutil,
94 rewriteutil,
89 scmutil,
95 scmutil,
90 smartset,
96 smartset,
91 tags,
97 tags,
92 templatefilters,
98 templatefilters,
93 templateutil,
99 templateutil,
94 url as urlmod,
100 url as urlmod,
95 util,
101 util,
96 )
102 )
97 from mercurial.utils import (
103 from mercurial.utils import (
98 procutil,
104 procutil,
99 stringutil,
105 stringutil,
100 )
106 )
101 from . import show
107 from . import show
102
108
103
109
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(b'phabricator', b'batchsize', default=12)
eh.configitem(b'phabricator', b'callsign', default=None)
eh.configitem(b'phabricator', b'curlcmd', default=None)
# developer config: phabricator.debug
eh.configitem(b'phabricator', b'debug', default=False)
# developer config: phabricator.repophid
eh.configitem(b'phabricator', b'repophid', default=None)
eh.configitem(b'phabricator', b'retry', default=0)
eh.configitem(b'phabricator', b'retry.interval', default=1)
eh.configitem(b'phabricator', b'url', default=None)
eh.configitem(b'phabsend', b'confirm', default=False)
eh.configitem(b'phabimport', b'secret', default=False)
eh.configitem(b'phabimport', b'obsolete', default=False)

# Color/effect labels used when rendering phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every vcr-enabled command (see vcrcommand()).
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
202
208
203
209
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    loaded = False
    settings = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings; map them back to local
        settings = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig (or unreadable): nothing to apply
        pass

    overrides = util.sortdict()
    # Map the arcconfig keys we understand onto [phabricator] config items.
    for jsonkey, cfgkey in [
        (b"repository.callsign", b"callsign"),
        (b"phabricator.uri", b"url"),
    ]:
        if jsonkey in settings:
            overrides[(b"phabricator", cfgkey)] = settings[jsonkey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Chain to the original loader for .hg/hgrc; report True if either
    # source contributed configuration.
    return orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or loaded
241
247
242
248
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register *name* as a command whose HTTP traffic can be recorded.

    The command gains a ``--test-vcr PATH`` flag.  When given, conduit HTTP
    requests are replayed from (or, if the file does not exist, recorded
    into) the named vcr cassette, so tests can run without a live server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method and decoded form parameters
        # agree; JSON payloads are compared structurally so key ordering
        # does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        p1 = util.urlreq.parseqs(r1.body)
        p2 = util.urlreq.parseqs(r2.body)
        for key, values in p1.items():
            if key not in p2:
                return False
            value = values[0]
            if value.startswith(b'{') and value.endswith(b'}'):
                if pycompat.json_loads(value) != pycompat.json_loads(
                    p2[key][0]
                ):
                    return False
            elif p2[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Never record real conduit API tokens.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are per-session state; strip them from recordings.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = kwargs.pop('test_vcr')
            if not cassette:
                return fn(*args, **kwargs)
            cassettepath = pycompat.fsdecode(cassette)
            import hgdemandimport

            # vcr does not play well with demandimport; load it eagerly.
            with hgdemandimport.deactivated():
                import vcr as vcrmod
                import vcr.stubs as stubs

                recorder = vcrmod.VCR(
                    serializer='json',
                    before_record_request=sanitiserequest,
                    before_record_response=sanitiseresponse,
                    custom_patches=[
                        (
                            urlmod,
                            'httpconnection',
                            stubs.VCRHTTPConnection,
                        ),
                        (
                            urlmod,
                            'httpsconnection',
                            stubs.VCRHTTPSConnection,
                        ),
                    ],
                )
                recorder.register_matcher('hgmatcher', hgmatcher)
                with recorder.use_cassette(
                    cassettepath, match_on=['hgmatcher']
                ):
                    return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
322
328
323
329
324 def _debug(ui, *msg, **opts):
330 def _debug(ui, *msg, **opts):
325 """write debug output for Phabricator if ``phabricator.debug`` is set
331 """write debug output for Phabricator if ``phabricator.debug`` is set
326
332
327 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
333 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
328 printed with the --debug argument.
334 printed with the --debug argument.
329 """
335 """
330 if ui.configbool(b"phabricator", b"debug"):
336 if ui.configbool(b"phabricator", b"debug"):
331 flag = ui.debugflag
337 flag = ui.debugflag
332 try:
338 try:
333 ui.debugflag = True
339 ui.debugflag = True
334 ui.write(*msg, **opts)
340 ui.write(*msg, **opts)
335 finally:
341 finally:
336 ui.debugflag = flag
342 ui.debugflag = flag
337
343
338
344
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # Python bools -> PHP form
            value = b'true' if value else b'false'
        # NOTE: dispatch on the *exact* type, as the original does — dict
        # or list subclasses are deliberately treated as leaf values.
        if type(value) is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for k, v in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, k), v)
            else:
                flatten(k, v)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
364
370
365
371
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up the matching [auth] group for this URL, if any.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
394
400
395
401
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {b'token': token}
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Delegate the HTTP work to the user-configured curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        max_try = ui.configint(b'phabricator', b'retry') + 1
        timeout = ui.configwith(float, b'http', b'timeout')
        # Retry transient failures; re-raise on the final attempt.
        for attempt in range(1, max_try + 1):
            try:
                with contextlib.closing(
                    urlopener.open(request, timeout=timeout)
                ) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                if attempt == max_try:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (attempt, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
453
462
454
463
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+ and only returns unicode,
    # so convert at both boundaries.
    rawparams = encoding.unifromlocal(ui.fin.read())
    tolocal = lambda x: (
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x
    )
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    # json.dumps only accepts unicode strings
    fromlocal = lambda x: (
        encoding.unifromlocal(x) if isinstance(x, bytes) else x
    )
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
478
487
479
488
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None
    repophid = matches[0][b'phid']
    # Cache the answer in config so later calls skip the round-trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
499
508
500
509
501 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
510 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
502 _differentialrevisiondescre = re.compile(
511 _differentialrevisiondescre = re.compile(
503 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
512 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
504 )
513 )
505
514
506
515
507 def getoldnodedrevmap(repo, nodelist):
516 def getoldnodedrevmap(repo, nodelist):
508 """find previous nodes that has been sent to Phabricator
517 """find previous nodes that has been sent to Phabricator
509
518
510 return {node: (oldnode, Differential diff, Differential Revision ID)}
519 return {node: (oldnode, Differential diff, Differential Revision ID)}
511 for node in nodelist with known previous sent versions, or associated
520 for node in nodelist with known previous sent versions, or associated
512 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
521 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
513 be ``None``.
522 be ``None``.
514
523
515 Examines commit messages like "Differential Revision:" to get the
524 Examines commit messages like "Differential Revision:" to get the
516 association information.
525 association information.
517
526
518 If such commit message line is not found, examines all precursors and their
527 If such commit message line is not found, examines all precursors and their
519 tags. Tags with format like "D1234" are considered a match and the node
528 tags. Tags with format like "D1234" are considered a match and the node
520 with that tag, and the number after "D" (ex. 1234) will be returned.
529 with that tag, and the number after "D" (ex. 1234) will be returned.
521
530
522 The ``old node``, if not None, is guaranteed to be the last diff of
531 The ``old node``, if not None, is guaranteed to be the last diff of
523 corresponding Differential Revision, and exist in the repo.
532 corresponding Differential Revision, and exist in the repo.
524 """
533 """
525 unfi = repo.unfiltered()
534 unfi = repo.unfiltered()
526 has_node = unfi.changelog.index.has_node
535 has_node = unfi.changelog.index.has_node
527
536
528 result = {} # {node: (oldnode?, lastdiff?, drev)}
537 result = {} # {node: (oldnode?, lastdiff?, drev)}
529 # ordered for test stability when printing new -> old mapping below
538 # ordered for test stability when printing new -> old mapping below
530 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
539 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
531 for node in nodelist:
540 for node in nodelist:
532 ctx = unfi[node]
541 ctx = unfi[node]
533 # For tags like "D123", put them into "toconfirm" to verify later
542 # For tags like "D123", put them into "toconfirm" to verify later
534 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
543 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
535 for n in precnodes:
544 for n in precnodes:
536 if has_node(n):
545 if has_node(n):
537 for tag in unfi.nodetags(n):
546 for tag in unfi.nodetags(n):
538 m = _differentialrevisiontagre.match(tag)
547 m = _differentialrevisiontagre.match(tag)
539 if m:
548 if m:
540 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
549 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
541 break
550 break
542 else:
551 else:
543 continue # move to next predecessor
552 continue # move to next predecessor
544 break # found a tag, stop
553 break # found a tag, stop
545 else:
554 else:
546 # Check commit message
555 # Check commit message
547 m = _differentialrevisiondescre.search(ctx.description())
556 m = _differentialrevisiondescre.search(ctx.description())
548 if m:
557 if m:
549 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
558 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
550
559
551 # Double check if tags are genuine by collecting all old nodes from
560 # Double check if tags are genuine by collecting all old nodes from
552 # Phabricator, and expect precursors overlap with it.
561 # Phabricator, and expect precursors overlap with it.
553 if toconfirm:
562 if toconfirm:
554 drevs = [drev for force, precs, drev in toconfirm.values()]
563 drevs = [drev for force, precs, drev in toconfirm.values()]
555 alldiffs = callconduit(
564 alldiffs = callconduit(
556 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
565 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
557 )
566 )
558
567
559 def getnodes(d, precset):
568 def getnodes(d, precset):
560 # Ignore other nodes that were combined into the Differential
569 # Ignore other nodes that were combined into the Differential
561 # that aren't predecessors of the current local node.
570 # that aren't predecessors of the current local node.
562 return [n for n in getlocalcommits(d) if n in precset]
571 return [n for n in getlocalcommits(d) if n in precset]
563
572
564 for newnode, (force, precset, drev) in toconfirm.items():
573 for newnode, (force, precset, drev) in toconfirm.items():
565 diffs = [
574 diffs = [
566 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
575 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
567 ]
576 ]
568
577
569 # local predecessors known by Phabricator
578 # local predecessors known by Phabricator
570 phprecset = {n for d in diffs for n in getnodes(d, precset)}
579 phprecset = {n for d in diffs for n in getnodes(d, precset)}
571
580
572 # Ignore if precursors (Phabricator and local repo) do not overlap,
581 # Ignore if precursors (Phabricator and local repo) do not overlap,
573 # and force is not set (when commit message says nothing)
582 # and force is not set (when commit message says nothing)
574 if not force and not phprecset:
583 if not force and not phprecset:
575 tagname = b'D%d' % drev
584 tagname = b'D%d' % drev
576 tags.tag(
585 tags.tag(
577 repo,
586 repo,
578 tagname,
587 tagname,
579 nullid,
588 nullid,
580 message=None,
589 message=None,
581 user=None,
590 user=None,
582 date=None,
591 date=None,
583 local=True,
592 local=True,
584 )
593 )
585 unfi.ui.warn(
594 unfi.ui.warn(
586 _(
595 _(
587 b'D%d: local tag removed - does not match '
596 b'D%d: local tag removed - does not match '
588 b'Differential history\n'
597 b'Differential history\n'
589 )
598 )
590 % drev
599 % drev
591 )
600 )
592 continue
601 continue
593
602
594 # Find the last node using Phabricator metadata, and make sure it
603 # Find the last node using Phabricator metadata, and make sure it
595 # exists in the repo
604 # exists in the repo
596 oldnode = lastdiff = None
605 oldnode = lastdiff = None
597 if diffs:
606 if diffs:
598 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
607 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
599 oldnodes = getnodes(lastdiff, precset)
608 oldnodes = getnodes(lastdiff, precset)
600
609
601 _debug(
610 _debug(
602 unfi.ui,
611 unfi.ui,
603 b"%s mapped to old nodes %s\n"
612 b"%s mapped to old nodes %s\n"
604 % (
613 % (
605 short(newnode),
614 short(newnode),
606 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
615 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
607 ),
616 ),
608 )
617 )
609
618
610 # If this commit was the result of `hg fold` after submission,
619 # If this commit was the result of `hg fold` after submission,
611 # and now resubmitted with --fold, the easiest thing to do is
620 # and now resubmitted with --fold, the easiest thing to do is
612 # to leave the node clear. This only results in creating a new
621 # to leave the node clear. This only results in creating a new
613 # diff for the _same_ Differential Revision if this commit is
622 # diff for the _same_ Differential Revision if this commit is
614 # the first or last in the selected range. If we picked a node
623 # the first or last in the selected range. If we picked a node
615 # from the list instead, it would have to be the lowest if at
624 # from the list instead, it would have to be the lowest if at
616 # the beginning of the --fold range, or the highest at the end.
625 # the beginning of the --fold range, or the highest at the end.
617 # Otherwise, one or more of the nodes wouldn't be considered in
626 # Otherwise, one or more of the nodes wouldn't be considered in
618 # the diff, and the Differential wouldn't be properly updated.
627 # the diff, and the Differential wouldn't be properly updated.
619 # If this commit is the result of `hg split` in the same
628 # If this commit is the result of `hg split` in the same
620 # scenario, there is a single oldnode here (and multiple
629 # scenario, there is a single oldnode here (and multiple
621 # newnodes mapped to it). That makes it the same as the normal
630 # newnodes mapped to it). That makes it the same as the normal
622 # case, as the edges of the newnode range cleanly maps to one
631 # case, as the edges of the newnode range cleanly maps to one
623 # oldnode each.
632 # oldnode each.
624 if len(oldnodes) == 1:
633 if len(oldnodes) == 1:
625 oldnode = oldnodes[0]
634 oldnode = oldnodes[0]
626 if oldnode and not has_node(oldnode):
635 if oldnode and not has_node(oldnode):
627 oldnode = None
636 oldnode = None
628
637
629 result[newnode] = (oldnode, lastdiff, drev)
638 result[newnode] = (oldnode, lastdiff, drev)
630
639
631 return result
640 return result
632
641
633
642
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # Prefer a Differential ID embedded in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            mapping[rev] = int(descmatch.group('id'))
            continue
        # Otherwise, fall back to a local "D123"-style tag on the node.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
655
664
656
665
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the raw text is wanted here.
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
665
674
666
675
class DiffChangeType(object):
    """Integer codes Phabricator uses to classify a change to one file.

    The values mirror Phabricator's DifferentialChangeType constants and are
    sent verbatim over the Conduit API, so they must not be renumbered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
676
685
677
686
class DiffFileType(object):
    """Integer codes Phabricator uses to classify a file's content type.

    Sent verbatim over the Conduit API; must not be renumbered.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
682
691
683
692
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change"""

    # Field order is significant: callers construct phabhunk positionally.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # Folded into the owning phabchange's addLines/delLines by addhunk().
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
696
705
697
706
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        # Snapshot the keys first: entries are inserted while iterating.
        for key in list(self.metadata.keys()):
            self.metadata[key.replace(b'new:', b'old:')] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # The Phab web UI shows these stats, uses them to estimate how large
        # a change a Revision is, and puts them in email subjects for the
        # [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
737
746
738
747
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for this diff, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
765
774
766
775
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A huge context window so each hunk effectively carries the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # Drop the "@@ ... @@" line; Phabricator only wants the body.
        corpus = b''.join(lines[1:])
        # Re-attach the git header so diffstatsum sees a complete patch.
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
795
804
796
805
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Read the file contents once up front: fctx.data() goes back to the
    # filelog each call, and the old code re-read the whole file for every
    # chunk it uploaded.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # The server may already hold some chunks (resumed upload).
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
822
831
823
832
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # An allocate call is required first, to see whether an upload is needed
    # at all (Phab might already have the content) and whether it must be
    # chunked.
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # Allocation succeeded with a PHID: large file, upload in chunks.
            uploadchunks(fctx, fphid)
        else:
            # No PHID yet: small file, single-shot upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
859
868
860
869
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical: mirror the new side's metadata instead of
        # re-uploading. If it's left as IMAGE/BINARY the web UI might try to
        # display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ (or the file is being removed): record and upload the
    # old version.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
884
893
885
894
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get their own type so the web UI can render a preview.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
898
907
899
908
# Copied from mercurial/patch.py
# Maps an hg file flag to the git mode string Phabricator expects.
gitmode = {
    b'': b'100644',  # regular file
    b'x': b'100755',  # executable
    b'l': b'120000',  # symlink
}
902
911
903
912
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user the file will be treated as binary; this is a side
        # effect callers rely on for their output.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
917
926
918
927
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    parent = basectx.p1()
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = parent[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary or non-UTF-8 deletions get no text hunk.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
931
940
932
941
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    parent = basectx.p1()
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = parent[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # notutf8() prints a message as a side effect, so preserve the
        # short-circuit evaluation order here.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
957
966
958
967
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Track files already recorded as moved/copied so additional copies of
    # the same source can be marked MULTICOPY (moves are dropped from
    # ``removed`` as they are claimed).
    copiedchanges = {}
    movedchanges = {}

    copymap = {}
    if basectx != ctx:
        copymap = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where this file came from: the copy map when folding a
        # range, otherwise the filectx's own rename record.
        if copymap:
            originalfname = copymap.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source is gone: this is a move. Claim the source so
                # addremoved() doesn't also emit it.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a message as a side effect, so keep this
        # short-circuit evaluation order.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        )
        if isbinary:
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1040
1049
1041
1050
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Assemble the payload for the "differential.creatediff" API call.
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    # The conduit call yielded nothing: report which range failed.
    if basectx != ctx:
        msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
    else:
        msg = _(b'cannot create diff for %s') % ctx
    raise error.Abort(msg)
1071
1080
1072
1081
1073 def writediffproperties(ctxs, diff):
1082 def writediffproperties(ctxs, diff):
1074 """write metadata to diff so patches could be applied losslessly
1083 """write metadata to diff so patches could be applied losslessly
1075
1084
1076 ``ctxs`` is the list of commits that created the diff, in ascending order.
1085 ``ctxs`` is the list of commits that created the diff, in ascending order.
1077 The list is generally a single commit, but may be several when using
1086 The list is generally a single commit, but may be several when using
1078 ``phabsend --fold``.
1087 ``phabsend --fold``.
1079 """
1088 """
1080 # creatediff returns with a diffid but query returns with an id
1089 # creatediff returns with a diffid but query returns with an id
1081 diffid = diff.get(b'diffid', diff.get(b'id'))
1090 diffid = diff.get(b'diffid', diff.get(b'id'))
1082 basectx = ctxs[0]
1091 basectx = ctxs[0]
1083 tipctx = ctxs[-1]
1092 tipctx = ctxs[-1]
1084
1093
1085 params = {
1094 params = {
1086 b'diff_id': diffid,
1095 b'diff_id': diffid,
1087 b'name': b'hg:meta',
1096 b'name': b'hg:meta',
1088 b'data': templatefilters.json(
1097 b'data': templatefilters.json(
1089 {
1098 {
1090 b'user': tipctx.user(),
1099 b'user': tipctx.user(),
1091 b'date': b'%d %d' % tipctx.date(),
1100 b'date': b'%d %d' % tipctx.date(),
1092 b'branch': tipctx.branch(),
1101 b'branch': tipctx.branch(),
1093 b'node': tipctx.hex(),
1102 b'node': tipctx.hex(),
1094 b'parent': basectx.p1().hex(),
1103 b'parent': basectx.p1().hex(),
1095 }
1104 }
1096 ),
1105 ),
1097 }
1106 }
1098 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1107 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1099
1108
1100 commits = {}
1109 commits = {}
1101 for ctx in ctxs:
1110 for ctx in ctxs:
1102 commits[ctx.hex()] = {
1111 commits[ctx.hex()] = {
1103 b'author': stringutil.person(ctx.user()),
1112 b'author': stringutil.person(ctx.user()),
1104 b'authorEmail': stringutil.email(ctx.user()),
1113 b'authorEmail': stringutil.email(ctx.user()),
1105 b'time': int(ctx.date()[0]),
1114 b'time': int(ctx.date()[0]),
1106 b'commit': ctx.hex(),
1115 b'commit': ctx.hex(),
1107 b'parents': [ctx.p1().hex()],
1116 b'parents': [ctx.p1().hex()],
1108 b'branch': ctx.branch(),
1117 b'branch': ctx.branch(),
1109 }
1118 }
1110 params = {
1119 params = {
1111 b'diff_id': diffid,
1120 b'diff_id': diffid,
1112 b'name': b'local:commits',
1121 b'name': b'local:commits',
1113 b'data': templatefilters.json(commits),
1122 b'data': templatefilters.json(commits),
1114 }
1123 }
1115 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1124 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1116
1125
1117
1126
1118 def createdifferentialrevision(
1127 def createdifferentialrevision(
1119 ctxs,
1128 ctxs,
1120 revid=None,
1129 revid=None,
1121 parentrevphid=None,
1130 parentrevphid=None,
1122 oldbasenode=None,
1131 oldbasenode=None,
1123 oldnode=None,
1132 oldnode=None,
1124 olddiff=None,
1133 olddiff=None,
1125 actions=None,
1134 actions=None,
1126 comment=None,
1135 comment=None,
1127 ):
1136 ):
1128 """create or update a Differential Revision
1137 """create or update a Differential Revision
1129
1138
1130 If revid is None, create a new Differential Revision, otherwise update
1139 If revid is None, create a new Differential Revision, otherwise update
1131 revid. If parentrevphid is not None, set it as a dependency.
1140 revid. If parentrevphid is not None, set it as a dependency.
1132
1141
1133 If there is a single commit for the new Differential Revision, ``ctxs`` will
1142 If there is a single commit for the new Differential Revision, ``ctxs`` will
1134 be a list of that single context. Otherwise, it is a list that covers the
1143 be a list of that single context. Otherwise, it is a list that covers the
1135 range of changes for the differential, where ``ctxs[0]`` is the first change
1144 range of changes for the differential, where ``ctxs[0]`` is the first change
1136 to include and ``ctxs[-1]`` is the last.
1145 to include and ``ctxs[-1]`` is the last.
1137
1146
1138 If oldnode is not None, check if the patch content (without commit message
1147 If oldnode is not None, check if the patch content (without commit message
1139 and metadata) has changed before creating another diff. For a Revision with
1148 and metadata) has changed before creating another diff. For a Revision with
1140 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1149 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1141 Revision covering multiple commits, ``oldbasenode`` corresponds to
1150 Revision covering multiple commits, ``oldbasenode`` corresponds to
1142 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1151 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1143 corresponds to ``ctxs[-1]``.
1152 corresponds to ``ctxs[-1]``.
1144
1153
1145 If actions is not None, they will be appended to the transaction.
1154 If actions is not None, they will be appended to the transaction.
1146 """
1155 """
1147 ctx = ctxs[-1]
1156 ctx = ctxs[-1]
1148 basectx = ctxs[0]
1157 basectx = ctxs[0]
1149
1158
1150 repo = ctx.repo()
1159 repo = ctx.repo()
1151 if oldnode:
1160 if oldnode:
1152 diffopts = mdiff.diffopts(git=True, context=32767)
1161 diffopts = mdiff.diffopts(git=True, context=32767)
1153 unfi = repo.unfiltered()
1162 unfi = repo.unfiltered()
1154 oldctx = unfi[oldnode]
1163 oldctx = unfi[oldnode]
1155 oldbasectx = unfi[oldbasenode]
1164 oldbasectx = unfi[oldbasenode]
1156 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1165 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1157 oldbasectx, oldctx, diffopts
1166 oldbasectx, oldctx, diffopts
1158 )
1167 )
1159 else:
1168 else:
1160 neednewdiff = True
1169 neednewdiff = True
1161
1170
1162 transactions = []
1171 transactions = []
1163 if neednewdiff:
1172 if neednewdiff:
1164 diff = creatediff(basectx, ctx)
1173 diff = creatediff(basectx, ctx)
1165 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1174 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1166 if comment:
1175 if comment:
1167 transactions.append({b'type': b'comment', b'value': comment})
1176 transactions.append({b'type': b'comment', b'value': comment})
1168 else:
1177 else:
1169 # Even if we don't need to upload a new diff because the patch content
1178 # Even if we don't need to upload a new diff because the patch content
1170 # does not change. We might still need to update its metadata so
1179 # does not change. We might still need to update its metadata so
1171 # pushers could know the correct node metadata.
1180 # pushers could know the correct node metadata.
1172 assert olddiff
1181 assert olddiff
1173 diff = olddiff
1182 diff = olddiff
1174 writediffproperties(ctxs, diff)
1183 writediffproperties(ctxs, diff)
1175
1184
1176 # Set the parent Revision every time, so commit re-ordering is picked-up
1185 # Set the parent Revision every time, so commit re-ordering is picked-up
1177 if parentrevphid:
1186 if parentrevphid:
1178 transactions.append(
1187 transactions.append(
1179 {b'type': b'parents.set', b'value': [parentrevphid]}
1188 {b'type': b'parents.set', b'value': [parentrevphid]}
1180 )
1189 )
1181
1190
1182 if actions:
1191 if actions:
1183 transactions += actions
1192 transactions += actions
1184
1193
1185 # When folding multiple local commits into a single review, arcanist will
1194 # When folding multiple local commits into a single review, arcanist will
1186 # take the summary line of the first commit as the title, and then
1195 # take the summary line of the first commit as the title, and then
1187 # concatenate the rest of the remaining messages (including each of their
1196 # concatenate the rest of the remaining messages (including each of their
1188 # first lines) to the rest of the first commit message (each separated by
1197 # first lines) to the rest of the first commit message (each separated by
1189 # an empty line), and use that as the summary field. Do the same here.
1198 # an empty line), and use that as the summary field. Do the same here.
1190 # For commits with only a one line message, there is no summary field, as
1199 # For commits with only a one line message, there is no summary field, as
1191 # this gets assigned to the title.
1200 # this gets assigned to the title.
1192 fields = util.sortdict() # sorted for stable wire protocol in tests
1201 fields = util.sortdict() # sorted for stable wire protocol in tests
1193
1202
1194 for i, _ctx in enumerate(ctxs):
1203 for i, _ctx in enumerate(ctxs):
1195 # Parse commit message and update related fields.
1204 # Parse commit message and update related fields.
1196 desc = _ctx.description()
1205 desc = _ctx.description()
1197 info = callconduit(
1206 info = callconduit(
1198 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1207 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1199 )
1208 )
1200
1209
1201 for k in [b'title', b'summary', b'testPlan']:
1210 for k in [b'title', b'summary', b'testPlan']:
1202 v = info[b'fields'].get(k)
1211 v = info[b'fields'].get(k)
1203 if not v:
1212 if not v:
1204 continue
1213 continue
1205
1214
1206 if i == 0:
1215 if i == 0:
1207 # Title, summary and test plan (if present) are taken verbatim
1216 # Title, summary and test plan (if present) are taken verbatim
1208 # for the first commit.
1217 # for the first commit.
1209 fields[k] = v.rstrip()
1218 fields[k] = v.rstrip()
1210 continue
1219 continue
1211 elif k == b'title':
1220 elif k == b'title':
1212 # Add subsequent titles (i.e. the first line of the commit
1221 # Add subsequent titles (i.e. the first line of the commit
1213 # message) back to the summary.
1222 # message) back to the summary.
1214 k = b'summary'
1223 k = b'summary'
1215
1224
1216 # Append any current field to the existing composite field
1225 # Append any current field to the existing composite field
1217 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1226 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1218
1227
1219 for k, v in fields.items():
1228 for k, v in fields.items():
1220 transactions.append({b'type': k, b'value': v})
1229 transactions.append({b'type': k, b'value': v})
1221
1230
1222 params = {b'transactions': transactions}
1231 params = {b'transactions': transactions}
1223 if revid is not None:
1232 if revid is not None:
1224 # Update an existing Differential Revision
1233 # Update an existing Differential Revision
1225 params[b'objectIdentifier'] = revid
1234 params[b'objectIdentifier'] = revid
1226
1235
1227 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1236 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1228 if not revision:
1237 if not revision:
1229 if len(ctxs) == 1:
1238 if len(ctxs) == 1:
1230 msg = _(b'cannot create revision for %s') % ctx
1239 msg = _(b'cannot create revision for %s') % ctx
1231 else:
1240 else:
1232 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1241 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1233 raise error.Abort(msg)
1242 raise error.Abort(msg)
1234
1243
1235 return revision, diff
1244 return revision, diff
1236
1245
1237
1246
1238 def userphids(ui, names):
1247 def userphids(ui, names):
1239 """convert user names to PHIDs"""
1248 """convert user names to PHIDs"""
1240 names = [name.lower() for name in names]
1249 names = [name.lower() for name in names]
1241 query = {b'constraints': {b'usernames': names}}
1250 query = {b'constraints': {b'usernames': names}}
1242 result = callconduit(ui, b'user.search', query)
1251 result = callconduit(ui, b'user.search', query)
1243 # username not found is not an error of the API. So check if we have missed
1252 # username not found is not an error of the API. So check if we have missed
1244 # some names here.
1253 # some names here.
1245 data = result[b'data']
1254 data = result[b'data']
1246 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1255 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1247 unresolved = set(names) - resolved
1256 unresolved = set(names) - resolved
1248 if unresolved:
1257 if unresolved:
1249 raise error.Abort(
1258 raise error.Abort(
1250 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1259 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1251 )
1260 )
1252 return [entry[b'phid'] for entry in data]
1261 return [entry[b'phid'] for entry in data]
1253
1262
1254
1263
1255 def _print_phabsend_action(ui, ctx, newrevid, action):
1264 def _print_phabsend_action(ui, ctx, newrevid, action):
1256 """print the ``action`` that occurred when posting ``ctx`` for review
1265 """print the ``action`` that occurred when posting ``ctx`` for review
1257
1266
1258 This is a utility function for the sending phase of ``phabsend``, which
1267 This is a utility function for the sending phase of ``phabsend``, which
1259 makes it easier to show a status for all local commits with `--fold``.
1268 makes it easier to show a status for all local commits with `--fold``.
1260 """
1269 """
1261 actiondesc = ui.label(
1270 actiondesc = ui.label(
1262 {
1271 {
1263 b'created': _(b'created'),
1272 b'created': _(b'created'),
1264 b'skipped': _(b'skipped'),
1273 b'skipped': _(b'skipped'),
1265 b'updated': _(b'updated'),
1274 b'updated': _(b'updated'),
1266 }[action],
1275 }[action],
1267 b'phabricator.action.%s' % action,
1276 b'phabricator.action.%s' % action,
1268 )
1277 )
1269 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1278 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1270 summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
1279 summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
1271 ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1280 ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1272
1281
1273
1282
1274 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1283 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1275 """update the local commit list for the ``diff`` associated with ``drevid``
1284 """update the local commit list for the ``diff`` associated with ``drevid``
1276
1285
1277 This is a utility function for the amend phase of ``phabsend``, which
1286 This is a utility function for the amend phase of ``phabsend``, which
1278 converts failures to warning messages.
1287 converts failures to warning messages.
1279 """
1288 """
1280 _debug(
1289 _debug(
1281 unfi.ui,
1290 unfi.ui,
1282 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1291 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1283 )
1292 )
1284
1293
1285 try:
1294 try:
1286 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1295 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1287 except util.urlerr.urlerror:
1296 except util.urlerr.urlerror:
1288 # If it fails just warn and keep going, otherwise the DREV
1297 # If it fails just warn and keep going, otherwise the DREV
1289 # associations will be lost
1298 # associations will be lost
1290 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1299 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1291
1300
1292
1301
1293 @vcrcommand(
1302 @vcrcommand(
1294 b'phabsend',
1303 b'phabsend',
1295 [
1304 [
1296 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1305 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1297 (b'', b'amend', True, _(b'update commit messages')),
1306 (b'', b'amend', True, _(b'update commit messages')),
1298 (b'', b'reviewer', [], _(b'specify reviewers')),
1307 (b'', b'reviewer', [], _(b'specify reviewers')),
1299 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1308 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1300 (
1309 (
1301 b'm',
1310 b'm',
1302 b'comment',
1311 b'comment',
1303 b'',
1312 b'',
1304 _(b'add a comment to Revisions with new/updated Diffs'),
1313 _(b'add a comment to Revisions with new/updated Diffs'),
1305 ),
1314 ),
1306 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1315 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1307 (b'', b'fold', False, _(b'combine the revisions into one review')),
1316 (b'', b'fold', False, _(b'combine the revisions into one review')),
1308 ],
1317 ],
1309 _(b'REV [OPTIONS]'),
1318 _(b'REV [OPTIONS]'),
1310 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1319 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1311 )
1320 )
1312 def phabsend(ui, repo, *revs, **opts):
1321 def phabsend(ui, repo, *revs, **opts):
1313 """upload changesets to Phabricator
1322 """upload changesets to Phabricator
1314
1323
1315 If there are multiple revisions specified, they will be send as a stack
1324 If there are multiple revisions specified, they will be send as a stack
1316 with a linear dependencies relationship using the order specified by the
1325 with a linear dependencies relationship using the order specified by the
1317 revset.
1326 revset.
1318
1327
1319 For the first time uploading changesets, local tags will be created to
1328 For the first time uploading changesets, local tags will be created to
1320 maintain the association. After the first time, phabsend will check
1329 maintain the association. After the first time, phabsend will check
1321 obsstore and tags information so it can figure out whether to update an
1330 obsstore and tags information so it can figure out whether to update an
1322 existing Differential Revision, or create a new one.
1331 existing Differential Revision, or create a new one.
1323
1332
1324 If --amend is set, update commit messages so they have the
1333 If --amend is set, update commit messages so they have the
1325 ``Differential Revision`` URL, remove related tags. This is similar to what
1334 ``Differential Revision`` URL, remove related tags. This is similar to what
1326 arcanist will do, and is more desired in author-push workflows. Otherwise,
1335 arcanist will do, and is more desired in author-push workflows. Otherwise,
1327 use local tags to record the ``Differential Revision`` association.
1336 use local tags to record the ``Differential Revision`` association.
1328
1337
1329 The --confirm option lets you confirm changesets before sending them. You
1338 The --confirm option lets you confirm changesets before sending them. You
1330 can also add following to your configuration file to make it default
1339 can also add following to your configuration file to make it default
1331 behaviour::
1340 behaviour::
1332
1341
1333 [phabsend]
1342 [phabsend]
1334 confirm = true
1343 confirm = true
1335
1344
1336 By default, a separate review will be created for each commit that is
1345 By default, a separate review will be created for each commit that is
1337 selected, and will have the same parent/child relationship in Phabricator.
1346 selected, and will have the same parent/child relationship in Phabricator.
1338 If ``--fold`` is set, multiple commits are rolled up into a single review
1347 If ``--fold`` is set, multiple commits are rolled up into a single review
1339 as if diffed from the parent of the first revision to the last. The commit
1348 as if diffed from the parent of the first revision to the last. The commit
1340 messages are concatenated in the summary field on Phabricator.
1349 messages are concatenated in the summary field on Phabricator.
1341
1350
1342 phabsend will check obsstore and the above association to decide whether to
1351 phabsend will check obsstore and the above association to decide whether to
1343 update an existing Differential Revision, or create a new one.
1352 update an existing Differential Revision, or create a new one.
1344 """
1353 """
1345 opts = pycompat.byteskwargs(opts)
1354 opts = pycompat.byteskwargs(opts)
1346 revs = list(revs) + opts.get(b'rev', [])
1355 revs = list(revs) + opts.get(b'rev', [])
1347 revs = scmutil.revrange(repo, revs)
1356 revs = scmutil.revrange(repo, revs)
1348 revs.sort() # ascending order to preserve topological parent/child in phab
1357 revs.sort() # ascending order to preserve topological parent/child in phab
1349
1358
1350 if not revs:
1359 if not revs:
1351 raise error.Abort(_(b'phabsend requires at least one changeset'))
1360 raise error.Abort(_(b'phabsend requires at least one changeset'))
1352 if opts.get(b'amend'):
1361 if opts.get(b'amend'):
1353 cmdutil.checkunfinished(repo)
1362 cmdutil.checkunfinished(repo)
1354
1363
1355 ctxs = [repo[rev] for rev in revs]
1364 ctxs = [repo[rev] for rev in revs]
1356
1365
1357 if any(c for c in ctxs if c.obsolete()):
1366 if any(c for c in ctxs if c.obsolete()):
1358 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1367 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1359
1368
1360 # Ensure the local commits are an unbroken range. The semantics of the
1369 # Ensure the local commits are an unbroken range. The semantics of the
1361 # --fold option implies this, and the auto restacking of orphans requires
1370 # --fold option implies this, and the auto restacking of orphans requires
1362 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1371 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1363 # get A' as a parent.
1372 # get A' as a parent.
1364 def _fail_nonlinear_revs(revs, revtype):
1373 def _fail_nonlinear_revs(revs, revtype):
1365 badnodes = [repo[r].node() for r in revs]
1374 badnodes = [repo[r].node() for r in revs]
1366 raise error.Abort(
1375 raise error.Abort(
1367 _(b"cannot phabsend multiple %s revisions: %s")
1376 _(b"cannot phabsend multiple %s revisions: %s")
1368 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1377 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1369 hint=_(b"the revisions must form a linear chain"),
1378 hint=_(b"the revisions must form a linear chain"),
1370 )
1379 )
1371
1380
1372 heads = repo.revs(b'heads(%ld)', revs)
1381 heads = repo.revs(b'heads(%ld)', revs)
1373 if len(heads) > 1:
1382 if len(heads) > 1:
1374 _fail_nonlinear_revs(heads, b"head")
1383 _fail_nonlinear_revs(heads, b"head")
1375
1384
1376 roots = repo.revs(b'roots(%ld)', revs)
1385 roots = repo.revs(b'roots(%ld)', revs)
1377 if len(roots) > 1:
1386 if len(roots) > 1:
1378 _fail_nonlinear_revs(roots, b"root")
1387 _fail_nonlinear_revs(roots, b"root")
1379
1388
1380 fold = opts.get(b'fold')
1389 fold = opts.get(b'fold')
1381 if fold:
1390 if fold:
1382 if len(revs) == 1:
1391 if len(revs) == 1:
1383 # TODO: just switch to --no-fold instead?
1392 # TODO: just switch to --no-fold instead?
1384 raise error.Abort(_(b"cannot fold a single revision"))
1393 raise error.Abort(_(b"cannot fold a single revision"))
1385
1394
1386 # There's no clear way to manage multiple commits with a Dxxx tag, so
1395 # There's no clear way to manage multiple commits with a Dxxx tag, so
1387 # require the amend option. (We could append "_nnn", but then it
1396 # require the amend option. (We could append "_nnn", but then it
1388 # becomes jumbled if earlier commits are added to an update.) It should
1397 # becomes jumbled if earlier commits are added to an update.) It should
1389 # lock the repo and ensure that the range is editable, but that would
1398 # lock the repo and ensure that the range is editable, but that would
1390 # make the code pretty convoluted. The default behavior of `arc` is to
1399 # make the code pretty convoluted. The default behavior of `arc` is to
1391 # create a new review anyway.
1400 # create a new review anyway.
1392 if not opts.get(b"amend"):
1401 if not opts.get(b"amend"):
1393 raise error.Abort(_(b"cannot fold with --no-amend"))
1402 raise error.Abort(_(b"cannot fold with --no-amend"))
1394
1403
1395 # It might be possible to bucketize the revisions by the DREV value, and
1404 # It might be possible to bucketize the revisions by the DREV value, and
1396 # iterate over those groups when posting, and then again when amending.
1405 # iterate over those groups when posting, and then again when amending.
1397 # But for simplicity, require all selected revisions to be for the same
1406 # But for simplicity, require all selected revisions to be for the same
1398 # DREV (if present). Adding local revisions to an existing DREV is
1407 # DREV (if present). Adding local revisions to an existing DREV is
1399 # acceptable.
1408 # acceptable.
1400 drevmatchers = [
1409 drevmatchers = [
1401 _differentialrevisiondescre.search(ctx.description())
1410 _differentialrevisiondescre.search(ctx.description())
1402 for ctx in ctxs
1411 for ctx in ctxs
1403 ]
1412 ]
1404 if len({m.group('url') for m in drevmatchers if m}) > 1:
1413 if len({m.group('url') for m in drevmatchers if m}) > 1:
1405 raise error.Abort(
1414 raise error.Abort(
1406 _(b"cannot fold revisions with different DREV values")
1415 _(b"cannot fold revisions with different DREV values")
1407 )
1416 )
1408
1417
1409 # {newnode: (oldnode, olddiff, olddrev}
1418 # {newnode: (oldnode, olddiff, olddrev}
1410 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1419 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1411
1420
1412 confirm = ui.configbool(b'phabsend', b'confirm')
1421 confirm = ui.configbool(b'phabsend', b'confirm')
1413 confirm |= bool(opts.get(b'confirm'))
1422 confirm |= bool(opts.get(b'confirm'))
1414 if confirm:
1423 if confirm:
1415 confirmed = _confirmbeforesend(repo, revs, oldmap)
1424 confirmed = _confirmbeforesend(repo, revs, oldmap)
1416 if not confirmed:
1425 if not confirmed:
1417 raise error.Abort(_(b'phabsend cancelled'))
1426 raise error.Abort(_(b'phabsend cancelled'))
1418
1427
1419 actions = []
1428 actions = []
1420 reviewers = opts.get(b'reviewer', [])
1429 reviewers = opts.get(b'reviewer', [])
1421 blockers = opts.get(b'blocker', [])
1430 blockers = opts.get(b'blocker', [])
1422 phids = []
1431 phids = []
1423 if reviewers:
1432 if reviewers:
1424 phids.extend(userphids(repo.ui, reviewers))
1433 phids.extend(userphids(repo.ui, reviewers))
1425 if blockers:
1434 if blockers:
1426 phids.extend(
1435 phids.extend(
1427 map(
1436 map(
1428 lambda phid: b'blocking(%s)' % phid,
1437 lambda phid: b'blocking(%s)' % phid,
1429 userphids(repo.ui, blockers),
1438 userphids(repo.ui, blockers),
1430 )
1439 )
1431 )
1440 )
1432 if phids:
1441 if phids:
1433 actions.append({b'type': b'reviewers.add', b'value': phids})
1442 actions.append({b'type': b'reviewers.add', b'value': phids})
1434
1443
1435 drevids = [] # [int]
1444 drevids = [] # [int]
1436 diffmap = {} # {newnode: diff}
1445 diffmap = {} # {newnode: diff}
1437
1446
1438 # Send patches one by one so we know their Differential Revision PHIDs and
1447 # Send patches one by one so we know their Differential Revision PHIDs and
1439 # can provide dependency relationship
1448 # can provide dependency relationship
1440 lastrevphid = None
1449 lastrevphid = None
1441 for ctx in ctxs:
1450 for ctx in ctxs:
1442 if fold:
1451 if fold:
1443 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1452 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1444 else:
1453 else:
1445 ui.debug(b'sending rev %d\n' % ctx.rev())
1454 ui.debug(b'sending rev %d\n' % ctx.rev())
1446
1455
1447 # Get Differential Revision ID
1456 # Get Differential Revision ID
1448 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1457 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1449 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1458 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1450
1459
1451 if fold:
1460 if fold:
1452 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1461 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1453 ctxs[-1].node(), (None, None, None)
1462 ctxs[-1].node(), (None, None, None)
1454 )
1463 )
1455
1464
1456 if oldnode != ctx.node() or opts.get(b'amend'):
1465 if oldnode != ctx.node() or opts.get(b'amend'):
1457 # Create or update Differential Revision
1466 # Create or update Differential Revision
1458 revision, diff = createdifferentialrevision(
1467 revision, diff = createdifferentialrevision(
1459 ctxs if fold else [ctx],
1468 ctxs if fold else [ctx],
1460 revid,
1469 revid,
1461 lastrevphid,
1470 lastrevphid,
1462 oldbasenode,
1471 oldbasenode,
1463 oldnode,
1472 oldnode,
1464 olddiff,
1473 olddiff,
1465 actions,
1474 actions,
1466 opts.get(b'comment'),
1475 opts.get(b'comment'),
1467 )
1476 )
1468
1477
1469 if fold:
1478 if fold:
1470 for ctx in ctxs:
1479 for ctx in ctxs:
1471 diffmap[ctx.node()] = diff
1480 diffmap[ctx.node()] = diff
1472 else:
1481 else:
1473 diffmap[ctx.node()] = diff
1482 diffmap[ctx.node()] = diff
1474
1483
1475 newrevid = int(revision[b'object'][b'id'])
1484 newrevid = int(revision[b'object'][b'id'])
1476 newrevphid = revision[b'object'][b'phid']
1485 newrevphid = revision[b'object'][b'phid']
1477 if revid:
1486 if revid:
1478 action = b'updated'
1487 action = b'updated'
1479 else:
1488 else:
1480 action = b'created'
1489 action = b'created'
1481
1490
1482 # Create a local tag to note the association, if commit message
1491 # Create a local tag to note the association, if commit message
1483 # does not have it already
1492 # does not have it already
1484 if not fold:
1493 if not fold:
1485 m = _differentialrevisiondescre.search(ctx.description())
1494 m = _differentialrevisiondescre.search(ctx.description())
1486 if not m or int(m.group('id')) != newrevid:
1495 if not m or int(m.group('id')) != newrevid:
1487 tagname = b'D%d' % newrevid
1496 tagname = b'D%d' % newrevid
1488 tags.tag(
1497 tags.tag(
1489 repo,
1498 repo,
1490 tagname,
1499 tagname,
1491 ctx.node(),
1500 ctx.node(),
1492 message=None,
1501 message=None,
1493 user=None,
1502 user=None,
1494 date=None,
1503 date=None,
1495 local=True,
1504 local=True,
1496 )
1505 )
1497 else:
1506 else:
1498 # Nothing changed. But still set "newrevphid" so the next revision
1507 # Nothing changed. But still set "newrevphid" so the next revision
1499 # could depend on this one and "newrevid" for the summary line.
1508 # could depend on this one and "newrevid" for the summary line.
1500 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1509 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1501 newrevid = revid
1510 newrevid = revid
1502 action = b'skipped'
1511 action = b'skipped'
1503
1512
1504 drevids.append(newrevid)
1513 drevids.append(newrevid)
1505 lastrevphid = newrevphid
1514 lastrevphid = newrevphid
1506
1515
1507 if fold:
1516 if fold:
1508 for c in ctxs:
1517 for c in ctxs:
1509 if oldmap.get(c.node(), (None, None, None))[2]:
1518 if oldmap.get(c.node(), (None, None, None))[2]:
1510 action = b'updated'
1519 action = b'updated'
1511 else:
1520 else:
1512 action = b'created'
1521 action = b'created'
1513 _print_phabsend_action(ui, c, newrevid, action)
1522 _print_phabsend_action(ui, c, newrevid, action)
1514 break
1523 break
1515
1524
1516 _print_phabsend_action(ui, ctx, newrevid, action)
1525 _print_phabsend_action(ui, ctx, newrevid, action)
1517
1526
1518 # Update commit messages and remove tags
1527 # Update commit messages and remove tags
1519 if opts.get(b'amend'):
1528 if opts.get(b'amend'):
1520 unfi = repo.unfiltered()
1529 unfi = repo.unfiltered()
1521 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1530 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1522 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1531 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1523 # Eagerly evaluate commits to restabilize before creating new
1532 # Eagerly evaluate commits to restabilize before creating new
1524 # commits. The selected revisions are excluded because they are
1533 # commits. The selected revisions are excluded because they are
1525 # automatically restacked as part of the submission process.
1534 # automatically restacked as part of the submission process.
1526 restack = [
1535 restack = [
1527 c
1536 c
1528 for c in repo.set(
1537 for c in repo.set(
1529 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1538 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1530 revs,
1539 revs,
1531 revs,
1540 revs,
1532 )
1541 )
1533 ]
1542 ]
1534 wnode = unfi[b'.'].node()
1543 wnode = unfi[b'.'].node()
1535 mapping = {} # {oldnode: [newnode]}
1544 mapping = {} # {oldnode: [newnode]}
1536 newnodes = []
1545 newnodes = []
1537
1546
1538 drevid = drevids[0]
1547 drevid = drevids[0]
1539
1548
1540 for i, rev in enumerate(revs):
1549 for i, rev in enumerate(revs):
1541 old = unfi[rev]
1550 old = unfi[rev]
1542 if not fold:
1551 if not fold:
1543 drevid = drevids[i]
1552 drevid = drevids[i]
1544 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1553 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1545
1554
1546 newdesc = get_amended_desc(drev, old, fold)
1555 newdesc = get_amended_desc(drev, old, fold)
1547 # Make sure commit message contain "Differential Revision"
1556 # Make sure commit message contain "Differential Revision"
1548 if (
1557 if (
1549 old.description() != newdesc
1558 old.description() != newdesc
1550 or old.p1().node() in mapping
1559 or old.p1().node() in mapping
1551 or old.p2().node() in mapping
1560 or old.p2().node() in mapping
1552 ):
1561 ):
1553 if old.phase() == phases.public:
1562 if old.phase() == phases.public:
1554 ui.warn(
1563 ui.warn(
1555 _(b"warning: not updating public commit %s\n")
1564 _(b"warning: not updating public commit %s\n")
1556 % scmutil.formatchangeid(old)
1565 % scmutil.formatchangeid(old)
1557 )
1566 )
1558 continue
1567 continue
1559 parents = [
1568 parents = [
1560 mapping.get(old.p1().node(), (old.p1(),))[0],
1569 mapping.get(old.p1().node(), (old.p1(),))[0],
1561 mapping.get(old.p2().node(), (old.p2(),))[0],
1570 mapping.get(old.p2().node(), (old.p2(),))[0],
1562 ]
1571 ]
1563 newdesc = rewriteutil.update_hash_refs(
1572 newdesc = rewriteutil.update_hash_refs(
1564 repo,
1573 repo,
1565 newdesc,
1574 newdesc,
1566 mapping,
1575 mapping,
1567 )
1576 )
1568 new = context.metadataonlyctx(
1577 new = context.metadataonlyctx(
1569 repo,
1578 repo,
1570 old,
1579 old,
1571 parents=parents,
1580 parents=parents,
1572 text=newdesc,
1581 text=newdesc,
1573 user=old.user(),
1582 user=old.user(),
1574 date=old.date(),
1583 date=old.date(),
1575 extra=old.extra(),
1584 extra=old.extra(),
1576 )
1585 )
1577
1586
1578 newnode = new.commit()
1587 newnode = new.commit()
1579
1588
1580 mapping[old.node()] = [newnode]
1589 mapping[old.node()] = [newnode]
1581
1590
1582 if fold:
1591 if fold:
1583 # Defer updating the (single) Diff until all nodes are
1592 # Defer updating the (single) Diff until all nodes are
1584 # collected. No tags were created, so none need to be
1593 # collected. No tags were created, so none need to be
1585 # removed.
1594 # removed.
1586 newnodes.append(newnode)
1595 newnodes.append(newnode)
1587 continue
1596 continue
1588
1597
1589 _amend_diff_properties(
1598 _amend_diff_properties(
1590 unfi, drevid, [newnode], diffmap[old.node()]
1599 unfi, drevid, [newnode], diffmap[old.node()]
1591 )
1600 )
1592
1601
1593 # Remove local tags since it's no longer necessary
1602 # Remove local tags since it's no longer necessary
1594 tagname = b'D%d' % drevid
1603 tagname = b'D%d' % drevid
1595 if tagname in repo.tags():
1604 if tagname in repo.tags():
1596 tags.tag(
1605 tags.tag(
1597 repo,
1606 repo,
1598 tagname,
1607 tagname,
1599 nullid,
1608 nullid,
1600 message=None,
1609 message=None,
1601 user=None,
1610 user=None,
1602 date=None,
1611 date=None,
1603 local=True,
1612 local=True,
1604 )
1613 )
1605 elif fold:
1614 elif fold:
1606 # When folding multiple commits into one review with
1615 # When folding multiple commits into one review with
1607 # --fold, track even the commits that weren't amended, so
1616 # --fold, track even the commits that weren't amended, so
1608 # that their association isn't lost if the properties are
1617 # that their association isn't lost if the properties are
1609 # rewritten below.
1618 # rewritten below.
1610 newnodes.append(old.node())
1619 newnodes.append(old.node())
1611
1620
1612 # If the submitted commits are public, no amend takes place so
1621 # If the submitted commits are public, no amend takes place so
1613 # there are no newnodes and therefore no diff update to do.
1622 # there are no newnodes and therefore no diff update to do.
1614 if fold and newnodes:
1623 if fold and newnodes:
1615 diff = diffmap[old.node()]
1624 diff = diffmap[old.node()]
1616
1625
1617 # The diff object in diffmap doesn't have the local commits
1626 # The diff object in diffmap doesn't have the local commits
1618 # because that could be returned from differential.creatediff,
1627 # because that could be returned from differential.creatediff,
1619 # not differential.querydiffs. So use the queried diff (if
1628 # not differential.querydiffs. So use the queried diff (if
1620 # present), or force the amend (a new revision is being posted.)
1629 # present), or force the amend (a new revision is being posted.)
1621 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1630 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1622 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1631 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1623 _amend_diff_properties(unfi, drevid, newnodes, diff)
1632 _amend_diff_properties(unfi, drevid, newnodes, diff)
1624 else:
1633 else:
1625 _debug(
1634 _debug(
1626 ui,
1635 ui,
1627 b"local commit list for D%d is already up-to-date\n"
1636 b"local commit list for D%d is already up-to-date\n"
1628 % drevid,
1637 % drevid,
1629 )
1638 )
1630 elif fold:
1639 elif fold:
1631 _debug(ui, b"no newnodes to update\n")
1640 _debug(ui, b"no newnodes to update\n")
1632
1641
1633 # Restack any children of first-time submissions that were orphaned
1642 # Restack any children of first-time submissions that were orphaned
1634 # in the process. The ctx won't report that it is an orphan until
1643 # in the process. The ctx won't report that it is an orphan until
1635 # the cleanup takes place below.
1644 # the cleanup takes place below.
1636 for old in restack:
1645 for old in restack:
1637 parents = [
1646 parents = [
1638 mapping.get(old.p1().node(), (old.p1(),))[0],
1647 mapping.get(old.p1().node(), (old.p1(),))[0],
1639 mapping.get(old.p2().node(), (old.p2(),))[0],
1648 mapping.get(old.p2().node(), (old.p2(),))[0],
1640 ]
1649 ]
1641 new = context.metadataonlyctx(
1650 new = context.metadataonlyctx(
1642 repo,
1651 repo,
1643 old,
1652 old,
1644 parents=parents,
1653 parents=parents,
1645 text=rewriteutil.update_hash_refs(
1654 text=rewriteutil.update_hash_refs(
1646 repo, old.description(), mapping
1655 repo, old.description(), mapping
1647 ),
1656 ),
1648 user=old.user(),
1657 user=old.user(),
1649 date=old.date(),
1658 date=old.date(),
1650 extra=old.extra(),
1659 extra=old.extra(),
1651 )
1660 )
1652
1661
1653 newnode = new.commit()
1662 newnode = new.commit()
1654
1663
1655 # Don't obsolete unselected descendants of nodes that have not
1664 # Don't obsolete unselected descendants of nodes that have not
1656 # been changed in this transaction- that results in an error.
1665 # been changed in this transaction- that results in an error.
1657 if newnode != old.node():
1666 if newnode != old.node():
1658 mapping[old.node()] = [newnode]
1667 mapping[old.node()] = [newnode]
1659 _debug(
1668 _debug(
1660 ui,
1669 ui,
1661 b"restabilizing %s as %s\n"
1670 b"restabilizing %s as %s\n"
1662 % (short(old.node()), short(newnode)),
1671 % (short(old.node()), short(newnode)),
1663 )
1672 )
1664 else:
1673 else:
1665 _debug(
1674 _debug(
1666 ui,
1675 ui,
1667 b"not restabilizing unchanged %s\n" % short(old.node()),
1676 b"not restabilizing unchanged %s\n" % short(old.node()),
1668 )
1677 )
1669
1678
1670 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1679 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1671 if wnode in mapping:
1680 if wnode in mapping:
1672 unfi.setparents(mapping[wnode][0])
1681 unfi.setparents(mapping[wnode][0])
1673
1682
1674
1683
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space matches the "Parent " header emitted by "hg export".
        (b'parent', b'Parent '),
    ]
)
1686
1695
1687
1696
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and ask the user to confirm.

    ``oldmap`` maps a changeset node to ``(oldnode, olddiff, drevid)``;
    a non-None ``drevid`` means the changeset was posted before and the
    existing review will be updated. Returns True to proceed, False to
    abort the send.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # Only the Differential Revision id (third tuple entry) matters here.
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            label = b'D%d' % drevid
        else:
            label = _(b'NEW')
        drevdesc = ui.label(label, b'phabricator.drev')
        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (drevdesc, summary))

    # promptchoice() returns the index of the selected answer: 0 for "Yes".
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1713
1722
1714
1723
# Differential Revision statuses recognized as query symbols, spelled in the
# normalized form produced by _getstatusname() (lowercase, spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1723
1732
1724
1733
1725 def _getstatusname(drev):
1734 def _getstatusname(drev):
1726 """get normalized status name from a Differential Revision"""
1735 """get normalized status name from a Differential Revision"""
1727 return drev[b'statusName'].replace(b' ', b'').lower()
1736 return drev[b'statusName'].replace(b' ', b'').lower()
1728
1737
1729
1738
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by Mercurial's generic Pratt parser (parser.parser).
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1744
1753
1745
1754
def _tokenize(text):
    """tokenize a DREVSPEC string; yield (token-type, value, pos) tuples

    Symbols are maximal runs of bytes outside the special set; each special
    character is its own token, except spaces, which are skipped. A final
    (b'end', None, pos) token is always emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Consume the longest run of non-special bytes as one symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1765
1774
1766
1775
def _parse(text):
    """parse a DREVSPEC string into a parse tree

    Raises ParseError if the whole input could not be consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1772
1781
1773
1782
1774 def _parsedrev(symbol):
1783 def _parsedrev(symbol):
1775 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1784 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1776 if symbol.startswith(b'D') and symbol[1:].isdigit():
1785 if symbol.startswith(b'D') and symbol[1:].isdigit():
1777 return int(symbol[1:])
1786 return int(symbol[1:])
1778 if symbol.isdigit():
1787 if symbol.isdigit():
1779 return int(symbol)
1788 return int(symbol)
1780
1789
1781
1790
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif kind == b'ancestors':
        # Every id under an ancestors (":") operator needs its whole
        # dependency stack fetched, so it counts in both sets.
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        drevs |= subdrevs
        ancestordrevs |= subdrevs
        ancestordrevs |= subancestors
    else:
        # Generic operator node: union the results of all operands.
        for subtree in tree[1:]:
            subdrevs, subancestors = _prefetchdrevs(subtree)
            drevs |= subdrevs
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
1802
1811
1803
1812
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # A query is keyed by its single id or phid; serve repeats from the
        # prefetched cache instead of issuing another conduit call.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk along "phabricator:depends-on" edges; ids are
        # collected top-down, then reversed so the bottom comes first.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Heuristic: a stack below r likely lives in nearby (lower) ids, so
        # fetch a window of batchsize ids ending at r in one conduit call.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status symbols select every prefetched drev in that state.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set algebra maps directly onto the operator module functions.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1929
1938
1930
1939
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Blank sections (e.g. an empty summary) are dropped entirely.
    return b'\n\n'.join(s for s in sections if s)
1944
1953
1945
1954
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    m = _differentialrevisiondescre.search(ctx.description())
    if not m:
        return b'\n\n'.join([ctx.description(), uri])

    # NOTE(review): ``uri`` is used as a regex *replacement template* here,
    # so a backslash in the URI would be interpreted as an escape sequence —
    # presumably Phabricator URIs never contain one; confirm if that changes.
    return _differentialrevisiondescre.sub(uri, ctx.description())
1971
1980
1972
1981
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    local_commits = properties.get(b'local:commits') or {}
    # Multiple entries only occur for folded reviews; the hex nodes are the
    # keys of the "local:commits" property.
    if len(local_commits) > 1:
        return {bin(hexnode) for hexnode in local_commits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    node = getdiffmeta(diff).get(b'node', b'')
    return {bin(node) or None}
1986
1995
1987
1996
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit deterministically. Sorting the dict *values*
            # (as done previously) raises TypeError on Python 3 as soon as
            # there is more than one commit, because dicts are unorderable;
            # selecting by the hex-node keys is equivalent for the common
            # single-commit case and stable otherwise.
            commit = min(props[b'local:commits'].items())[1]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" times carry no zone; assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2055
2064
2056
2065
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _wrap(one):
            # a leading b':' selects the whole stack below the revision
            if stack:
                one = b':(%s)' % one
            return b'(%s)' % one

        # the result is the union of each individually evaluated spec
        joined = b'+'.join(pycompat.maplist(_wrap, specs))

        found = querydrev(ui, joined)
        if found:
            return found

    raise error.Abort(_(b"empty DREVSPEC set"))
2076
2085
2077
2086
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta for every diff in a single conduit round-trip
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property. Emit hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(
                    b'# %s %s\n' % (_metanamemap[key], meta[key])
                )
        header = b''.join(headerlines)

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand the assembled patches to the supplied callback
    write(patches)
2114
2123
2115
2124
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # the patch text is already complete; the drev number is unused here
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
2152
2161
2153
2162
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _apply(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # later patches in the stack apply on top of this import
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
2213
2222
2214
2223
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # every mutually exclusive transaction type the edit endpoint accepts
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # a --comment only attaches to the last revision of the set
        if i == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2270
2279
2271
2280
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Prefer the "Differential Revision: <url>" line in the description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # Otherwise fall back to a local "D123"-style tag, reconstructing the
    # review URL from the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        base = ctx.repo().ui.config(b'phabricator', b'url')
        if not base.endswith(b'/'):
            base += b'/'
        return templateutil.hybriddict(
            {
                b'url': base + tag,
                b'id': tag,
            }
        )
    return None
2302
2311
2303
2312
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # no Differential Revision associated with this changeset
        return None

    # pick the entry matching the id we asked for out of the conduit reply
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {
                    b'url': drev[b'uri'],
                    b'status': drev[b'statusName'],
                }
            )
    return None
2326
2335
2327
2336
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    # Map each unfinished rev to its Differential Revision id (or None).
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # several local revs may share one drev (e.g. amended commits)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit call for all drevs, then invert the mapping back to revs.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked by the displayer for each changeset: print the review
        # URL and a colorized status label.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revs that actually have an associated drev.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now