##// END OF EJS Templates
phabricator: introduce a `phabricator.retry` option...
marmoute -
r46583:57183111 default
parent child Browse files
Show More
@@ -1,2335 +1,2368 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
 38 # retry a failed command N times (default 0). Useful when using the extension
 39 # over a flaky connection.
40 #
41 # We wait `retry.interval` between each retry, in seconds.
42 # (default 1 second).
43 retry = 3
44 retry.interval = 10
45
38 [auth]
46 [auth]
39 example.schemes = https
47 example.schemes = https
40 example.prefix = phab.example.com
48 example.prefix = phab.example.com
41
49
42 # API token. Get it from https://$HOST/conduit/login/
50 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
51 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
52 """
45
53
46 from __future__ import absolute_import
54 from __future__ import absolute_import
47
55
48 import base64
56 import base64
49 import contextlib
57 import contextlib
50 import hashlib
58 import hashlib
51 import itertools
59 import itertools
52 import json
60 import json
53 import mimetypes
61 import mimetypes
54 import operator
62 import operator
55 import re
63 import re
64 import time
56
65
57 from mercurial.node import bin, nullid, short
66 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
67 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
68 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
69 from mercurial.thirdparty import attr
61 from mercurial import (
70 from mercurial import (
62 cmdutil,
71 cmdutil,
63 context,
72 context,
64 copies,
73 copies,
65 encoding,
74 encoding,
66 error,
75 error,
67 exthelper,
76 exthelper,
68 graphmod,
77 graphmod,
69 httpconnection as httpconnectionmod,
78 httpconnection as httpconnectionmod,
70 localrepo,
79 localrepo,
71 logcmdutil,
80 logcmdutil,
72 match,
81 match,
73 mdiff,
82 mdiff,
74 obsutil,
83 obsutil,
75 parser,
84 parser,
76 patch,
85 patch,
77 phases,
86 phases,
78 pycompat,
87 pycompat,
79 rewriteutil,
88 rewriteutil,
80 scmutil,
89 scmutil,
81 smartset,
90 smartset,
82 tags,
91 tags,
83 templatefilters,
92 templatefilters,
84 templateutil,
93 templateutil,
85 url as urlmod,
94 url as urlmod,
86 util,
95 util,
87 )
96 )
88 from mercurial.utils import (
97 from mercurial.utils import (
89 procutil,
98 procutil,
90 stringutil,
99 stringutil,
91 )
100 )
92 from . import show
101 from . import show
93
102
94
103
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Single exthelper instance that collects every command, config item and
# template keyword this extension registers.
eh = exthelper.exthelper()

# Re-export the exthelper registration points under the attribute names
# Mercurial's extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup
108
117
# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator',
    b'batchsize',
    default=12,
)
eh.configitem(
    b'phabricator',
    b'callsign',
    default=None,
)
eh.configitem(
    b'phabricator',
    b'curlcmd',
    default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator',
    b'debug',
    default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator',
    b'repophid',
    default=None,
)
# number of extra attempts for a failed Conduit request (0 = no retry)
eh.configitem(
    b'phabricator',
    b'retry',
    default=0,
)
# seconds to sleep between Conduit retry attempts
eh.configitem(
    b'phabricator',
    b'retry.interval',
    default=1,
)
eh.configitem(
    b'phabricator',
    b'url',
    default=None,
)
eh.configitem(
    b'phabsend',
    b'confirm',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'secret',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'obsolete',
    default=False,
)
157
176
# Labels emitted by this extension mapped to color effects, picked up by the
# color extension when rendering phabsend/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
170
189
# Extra command-line flags appended to every command declared through
# vcrcommand(); --test-vcr records/replays HTTP traffic for the test suite.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
183
202
184
203
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    loaded = False
    settings = {}

    def tolocal(value):
        # json.loads only returns unicode strings
        if isinstance(value, pycompat.unicode):
            return encoding.unitolocal(value)
        return value

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        settings = pycompat.rapply(tolocal, pycompat.json_loads(raw))
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # missing .arcconfig is perfectly normal
        pass

    overrides = util.sortdict()
    # map .arcconfig keys onto the extension's own config items
    for arckey, cfgkey in [
        (b"repository.callsign", (b"phabricator", b"callsign")),
        (b"phabricator.uri", (b"phabricator", b"url")),
    ]:
        if arckey in settings:
            overrides[cfgkey] = settings[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # chain to the wrapped function so .hg/hgrc is still loaded
    return orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or loaded
222
241
223
242
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Declare a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``@command`` but appends the hidden ``--test-vcr`` flag
    (see ``_VCR_FLAGS``); when that flag is given, all HTTP requests made by
    the command are recorded to, or replayed from, the named cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: requests match when URI, method and every
        # decoded body parameter agree; JSON payloads are compared
        # structurally so key ordering does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # scrub the real conduit API token before it lands in a cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # cookies are session-specific; keep them out of recorded fixtures
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if vcr:
                cassette = pycompat.fsdecode(vcr)
                import hgdemandimport

                # vcr does dynamic imports that confuse demandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # patch Mercurial's url module so its connections go
                        # through vcr's recording stubs
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped command's identity for help/dispatch
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
303
322
304
323
305 def _debug(ui, *msg, **opts):
324 def _debug(ui, *msg, **opts):
306 """write debug output for Phabricator if ``phabricator.debug`` is set
325 """write debug output for Phabricator if ``phabricator.debug`` is set
307
326
308 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
327 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
309 printed with the --debug argument.
328 printed with the --debug argument.
310 """
329 """
311 if ui.configbool(b"phabricator", b"debug"):
330 if ui.configbool(b"phabricator", b"debug"):
312 flag = ui.debugflag
331 flag = ui.debugflag
313 try:
332 try:
314 ui.debugflag = True
333 ui.debugflag = True
315 ui.write(*msg, **opts)
334 ui.write(*msg, **opts)
316 finally:
335 finally:
317 ui.debugflag = flag
336 ui.debugflag = flag
318
337
319
338
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # PHP's form encoding spells booleans out as words
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # dispatch on the exact type: only plain lists and dicts recurse
        kind = type(value)
        if kind is list:
            children = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            children = list(value.items())
        else:
            flat[prefix] = value
            return
        for key, item in children:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
345
364
346
365
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # look the URL up in the [auth] section to find a matching token
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        group, auth = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
375
394
376
395
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    On a urllib error the request is retried up to ``phabricator.retry``
    additional times, sleeping ``phabricator.retry.interval`` seconds between
    attempts.  Retries apply only to the builtin HTTP path; when
    ``phabricator.curlcmd`` is set, retrying is left to curl's own options.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # the conduit token travels inside the request parameters
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # pipe the form data into the user-configured curl command
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        # total attempts: the initial try plus `retry` retries
        max_try = ui.configint(b'phabricator', b'retry') + 1
        for try_count in range(max_try):
            try:
                with contextlib.closing(urlopener.open(request)) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                # last attempt: propagate the error to the caller
                if try_count == max_try - 1:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (try_count + 1, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        # json.loads only returns unicode strings; map them back to local bytes
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
420
453
421
454
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def fromlocal(x):
        # json.dumps only accepts unicode strings
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    rendered = json.dumps(
        result, sort_keys=True, indent=2, separators=(u',', u': ')
    )
    ui.write(b'%s\n' % encoding.unitolocal(rendered))
445
478
446
479
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid — an explicit/cached PHID wins
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # ask Phabricator which repository carries this callsign
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # remember the answer so later calls skip the Conduit round-trip
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
466
499
467
500
# Matches a local tag like "D123" that marks a previously-submitted node.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>D<id>" trailer in a commit
# message; named groups expose the full URL and the numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
472
505
473
506
474 def getoldnodedrevmap(repo, nodelist):
507 def getoldnodedrevmap(repo, nodelist):
475 """find previous nodes that has been sent to Phabricator
508 """find previous nodes that has been sent to Phabricator
476
509
477 return {node: (oldnode, Differential diff, Differential Revision ID)}
510 return {node: (oldnode, Differential diff, Differential Revision ID)}
478 for node in nodelist with known previous sent versions, or associated
511 for node in nodelist with known previous sent versions, or associated
479 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
512 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
480 be ``None``.
513 be ``None``.
481
514
482 Examines commit messages like "Differential Revision:" to get the
515 Examines commit messages like "Differential Revision:" to get the
483 association information.
516 association information.
484
517
485 If such commit message line is not found, examines all precursors and their
518 If such commit message line is not found, examines all precursors and their
486 tags. Tags with format like "D1234" are considered a match and the node
519 tags. Tags with format like "D1234" are considered a match and the node
487 with that tag, and the number after "D" (ex. 1234) will be returned.
520 with that tag, and the number after "D" (ex. 1234) will be returned.
488
521
489 The ``old node``, if not None, is guaranteed to be the last diff of
522 The ``old node``, if not None, is guaranteed to be the last diff of
490 corresponding Differential Revision, and exist in the repo.
523 corresponding Differential Revision, and exist in the repo.
491 """
524 """
492 unfi = repo.unfiltered()
525 unfi = repo.unfiltered()
493 has_node = unfi.changelog.index.has_node
526 has_node = unfi.changelog.index.has_node
494
527
495 result = {} # {node: (oldnode?, lastdiff?, drev)}
528 result = {} # {node: (oldnode?, lastdiff?, drev)}
496 # ordered for test stability when printing new -> old mapping below
529 # ordered for test stability when printing new -> old mapping below
497 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
530 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
498 for node in nodelist:
531 for node in nodelist:
499 ctx = unfi[node]
532 ctx = unfi[node]
500 # For tags like "D123", put them into "toconfirm" to verify later
533 # For tags like "D123", put them into "toconfirm" to verify later
501 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
534 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
502 for n in precnodes:
535 for n in precnodes:
503 if has_node(n):
536 if has_node(n):
504 for tag in unfi.nodetags(n):
537 for tag in unfi.nodetags(n):
505 m = _differentialrevisiontagre.match(tag)
538 m = _differentialrevisiontagre.match(tag)
506 if m:
539 if m:
507 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
540 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
508 break
541 break
509 else:
542 else:
510 continue # move to next predecessor
543 continue # move to next predecessor
511 break # found a tag, stop
544 break # found a tag, stop
512 else:
545 else:
513 # Check commit message
546 # Check commit message
514 m = _differentialrevisiondescre.search(ctx.description())
547 m = _differentialrevisiondescre.search(ctx.description())
515 if m:
548 if m:
516 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
549 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
517
550
518 # Double check if tags are genuine by collecting all old nodes from
551 # Double check if tags are genuine by collecting all old nodes from
519 # Phabricator, and expect precursors overlap with it.
552 # Phabricator, and expect precursors overlap with it.
520 if toconfirm:
553 if toconfirm:
521 drevs = [drev for force, precs, drev in toconfirm.values()]
554 drevs = [drev for force, precs, drev in toconfirm.values()]
522 alldiffs = callconduit(
555 alldiffs = callconduit(
523 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
556 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
524 )
557 )
525
558
526 def getnodes(d, precset):
559 def getnodes(d, precset):
527 # Ignore other nodes that were combined into the Differential
560 # Ignore other nodes that were combined into the Differential
528 # that aren't predecessors of the current local node.
561 # that aren't predecessors of the current local node.
529 return [n for n in getlocalcommits(d) if n in precset]
562 return [n for n in getlocalcommits(d) if n in precset]
530
563
531 for newnode, (force, precset, drev) in toconfirm.items():
564 for newnode, (force, precset, drev) in toconfirm.items():
532 diffs = [
565 diffs = [
533 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
566 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
534 ]
567 ]
535
568
536 # local predecessors known by Phabricator
569 # local predecessors known by Phabricator
537 phprecset = {n for d in diffs for n in getnodes(d, precset)}
570 phprecset = {n for d in diffs for n in getnodes(d, precset)}
538
571
539 # Ignore if precursors (Phabricator and local repo) do not overlap,
572 # Ignore if precursors (Phabricator and local repo) do not overlap,
540 # and force is not set (when commit message says nothing)
573 # and force is not set (when commit message says nothing)
541 if not force and not phprecset:
574 if not force and not phprecset:
542 tagname = b'D%d' % drev
575 tagname = b'D%d' % drev
543 tags.tag(
576 tags.tag(
544 repo,
577 repo,
545 tagname,
578 tagname,
546 nullid,
579 nullid,
547 message=None,
580 message=None,
548 user=None,
581 user=None,
549 date=None,
582 date=None,
550 local=True,
583 local=True,
551 )
584 )
552 unfi.ui.warn(
585 unfi.ui.warn(
553 _(
586 _(
554 b'D%d: local tag removed - does not match '
587 b'D%d: local tag removed - does not match '
555 b'Differential history\n'
588 b'Differential history\n'
556 )
589 )
557 % drev
590 % drev
558 )
591 )
559 continue
592 continue
560
593
561 # Find the last node using Phabricator metadata, and make sure it
594 # Find the last node using Phabricator metadata, and make sure it
562 # exists in the repo
595 # exists in the repo
563 oldnode = lastdiff = None
596 oldnode = lastdiff = None
564 if diffs:
597 if diffs:
565 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
598 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
566 oldnodes = getnodes(lastdiff, precset)
599 oldnodes = getnodes(lastdiff, precset)
567
600
568 _debug(
601 _debug(
569 unfi.ui,
602 unfi.ui,
570 b"%s mapped to old nodes %s\n"
603 b"%s mapped to old nodes %s\n"
571 % (
604 % (
572 short(newnode),
605 short(newnode),
573 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
606 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
574 ),
607 ),
575 )
608 )
576
609
577 # If this commit was the result of `hg fold` after submission,
610 # If this commit was the result of `hg fold` after submission,
578 # and now resubmitted with --fold, the easiest thing to do is
611 # and now resubmitted with --fold, the easiest thing to do is
579 # to leave the node clear. This only results in creating a new
612 # to leave the node clear. This only results in creating a new
580 # diff for the _same_ Differential Revision if this commit is
613 # diff for the _same_ Differential Revision if this commit is
581 # the first or last in the selected range. If we picked a node
614 # the first or last in the selected range. If we picked a node
582 # from the list instead, it would have to be the lowest if at
615 # from the list instead, it would have to be the lowest if at
583 # the beginning of the --fold range, or the highest at the end.
616 # the beginning of the --fold range, or the highest at the end.
584 # Otherwise, one or more of the nodes wouldn't be considered in
617 # Otherwise, one or more of the nodes wouldn't be considered in
585 # the diff, and the Differential wouldn't be properly updated.
618 # the diff, and the Differential wouldn't be properly updated.
586 # If this commit is the result of `hg split` in the same
619 # If this commit is the result of `hg split` in the same
587 # scenario, there is a single oldnode here (and multiple
620 # scenario, there is a single oldnode here (and multiple
588 # newnodes mapped to it). That makes it the same as the normal
621 # newnodes mapped to it). That makes it the same as the normal
589 # case, as the edges of the newnode range cleanly maps to one
622 # case, as the edges of the newnode range cleanly maps to one
590 # oldnode each.
623 # oldnode each.
591 if len(oldnodes) == 1:
624 if len(oldnodes) == 1:
592 oldnode = oldnodes[0]
625 oldnode = oldnodes[0]
593 if oldnode and not has_node(oldnode):
626 if oldnode and not has_node(oldnode):
594 oldnode = None
627 oldnode = None
595
628
596 result[newnode] = (oldnode, lastdiff, drev)
629 result[newnode] = (oldnode, lastdiff, drev)
597
630
598 return result
631 return result
599
632
600
633
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # Prefer the "Differential Revision:" line in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch is not None:
            mapping[rev] = int(descmatch.group('id'))
            continue
        # Otherwise fall back to a local D<num> tag on the node, if any.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch is not None:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
622
655
623
656
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw diff chunks, ignoring the ui color labels.
    buf = util.stringio()
    diffiter = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in diffiter:
        buf.write(chunk)
    return buf.getvalue()
632
665
633
666
class DiffChangeType(object):
    """Kind of change a Differential "change" (one file) represents.

    Values mirror Phabricator's ArcanistDiffChangeType constants.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
643
676
644
677
class DiffFileType(object):
    """Content type of a file in a Differential diff (Phabricator constants)."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
649
682
650
683
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change"""

    # NOTE: attribute order is significant — callers (maketext) construct
    # phabhunk positionally, so do not reorder these fields.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
663
696
664
697
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:*`` metadata key under the ``old:*`` prefix."""
        # Snapshot the keys first: new entries are inserted while iterating.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the pre-change unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the post-change unix file mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
704
737
705
738
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the path of the file it touches."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
732
765
733
766
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A huge context makes each hunk self-contained for Phabricator's viewer.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # Drop the "@@ ... @@" line; Phabricator only wants the body.
        corpus = b''.join(lines[1:])
        # Recompute add/del counts from a synthetic header + hunk patch.
        shunk = list(header)
        shunk.extend(lines)
        stats = patch.diffstatsum(patch.diffstatdata(util.iterlines(shunk)))
        _mf, _mt, addLines, delLines, _hb = stats
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
762
795
763
796
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Chunks the server already has are skipped (resumable upload).
            if not chunk[b'complete']:
                start = int(chunk[b'byteStart'])
                end = int(chunk[b'byteEnd'])
                callconduit(
                    ui,
                    b'file.uploadchunk',
                    {
                        b'filePHID': fphid,
                        b'byteStart': start,
                        b'data': base64.b64encode(fctx.data()[start:end]),
                        b'dataEncoding': b'base64',
                    },
                )
789
822
790
823
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # Server assigned a PHID and wants the content in chunks.
            uploadchunks(fctx, fphid)
        else:
            # Small enough for a single-request upload.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
826
859
827
860
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical: no need to upload the old bytes again.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
851
884
852
885
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    # Upload the content first; the PHID links the change to the file object.
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get an inline preview in the web UI.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
865
898
866
899
# Copied from mercurial/patch.py
# Maps a filectx flag ('l' symlink, 'x' executable, '' regular file) to the
# git-style mode string Phabricator expects.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
869
902
870
903
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn so the user knows why the file will be treated as binary.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
884
917
885
918
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for path in removed:
        pchange = phabchange(
            currentPath=path, oldPath=path, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[path]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary / non-UTF-8 removals carry no text hunks.
        if not oldfctx.isbinary() and not notutf8(oldfctx):
            maketext(pchange, basectx, ctx, path)

        pdiff.addchange(pchange)
898
931
899
932
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # Record a mode flip (e.g. adding +x) as explicit old/new properties.
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # Either side being binary (or undecodable) forces the binary path.
        # notutf8() also emits a user warning, so keep the short-circuit order.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
924
957
925
958
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Track files already recorded as moved/copied, so additional copies of
    # the same source can be flagged (moves get removed from ``removed``).
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the copy/rename source, if any.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Already moved once; another destination makes it MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                origpchange = copiedchanges.get(originalfname)
                if origpchange is None:
                    origpchange = phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    )
                    copiedchanges[originalfname] = origpchange
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() warns the user as a side effect; keep short-circuit order.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Flush the synthesized source-side changes after all adds are processed.
    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
1007
1040
1008
1041
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    if repophid:
        pdiff.repositoryPHID = repophid
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        msg = (
            _(b'cannot create diff for %s::%s') % (basectx, ctx)
            if basectx != ctx
            else _(b'cannot create diff for %s') % ctx
        )
        raise error.Abort(msg)
    return diff
1038
1071
1039
1072
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    firstctx, lastctx = ctxs[0], ctxs[-1]
    ui = firstctx.repo().ui

    def setprop(name, data):
        # attach one named property to the diff via the conduit API
        callconduit(
            ui,
            b'differential.setdiffproperty',
            {b'diff_id': diffid, b'name': name, b'data': data},
        )

    # "hg:meta" describes the tip commit; its parent field points at the
    # parent of the *first* commit so the whole range can be re-applied.
    hgmeta = {
        b'user': lastctx.user(),
        b'date': b'%d %d' % lastctx.date(),
        b'branch': lastctx.branch(),
        b'node': lastctx.hex(),
        b'parent': firstctx.p1().hex(),
    }
    setprop(b'hg:meta', templatefilters.json(hgmeta))

    # "local:commits" records per-commit metadata for every commit folded
    # into this diff.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    setprop(b'local:commits', templatefilters.json(commits))
1083
1116
1084
1117
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` tuple: the conduit response for the
    created/updated Revision and the diff attached to it (which may be
    ``olddiff`` when no new upload was needed). Raises ``error.Abort`` if
    the server does not return a revision.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context (effectively unified-everything) diffs of the
        # old and new ranges to decide whether a new diff upload is needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1203
1236
1204
1237
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1220
1253
1221
1254
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    # Translatable labels for the three possible outcomes of posting a commit.
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1239
1272
1240
1273
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = [short(n) for n in newnodes]
    _debug(unfi.ui, b"new commits: %s\n" % stringutil.pprint(shortnodes))

    try:
        newctxs = [unfi[node] for node in newnodes]
        writediffproperties(newctxs, diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1258
1291
1259
1292
1260 @vcrcommand(
1293 @vcrcommand(
1261 b'phabsend',
1294 b'phabsend',
1262 [
1295 [
1263 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1296 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1264 (b'', b'amend', True, _(b'update commit messages')),
1297 (b'', b'amend', True, _(b'update commit messages')),
1265 (b'', b'reviewer', [], _(b'specify reviewers')),
1298 (b'', b'reviewer', [], _(b'specify reviewers')),
1266 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1299 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1267 (
1300 (
1268 b'm',
1301 b'm',
1269 b'comment',
1302 b'comment',
1270 b'',
1303 b'',
1271 _(b'add a comment to Revisions with new/updated Diffs'),
1304 _(b'add a comment to Revisions with new/updated Diffs'),
1272 ),
1305 ),
1273 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1306 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1274 (b'', b'fold', False, _(b'combine the revisions into one review')),
1307 (b'', b'fold', False, _(b'combine the revisions into one review')),
1275 ],
1308 ],
1276 _(b'REV [OPTIONS]'),
1309 _(b'REV [OPTIONS]'),
1277 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1310 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1278 )
1311 )
1279 def phabsend(ui, repo, *revs, **opts):
1312 def phabsend(ui, repo, *revs, **opts):
1280 """upload changesets to Phabricator
1313 """upload changesets to Phabricator
1281
1314
1282 If there are multiple revisions specified, they will be send as a stack
1315 If there are multiple revisions specified, they will be send as a stack
1283 with a linear dependencies relationship using the order specified by the
1316 with a linear dependencies relationship using the order specified by the
1284 revset.
1317 revset.
1285
1318
1286 For the first time uploading changesets, local tags will be created to
1319 For the first time uploading changesets, local tags will be created to
1287 maintain the association. After the first time, phabsend will check
1320 maintain the association. After the first time, phabsend will check
1288 obsstore and tags information so it can figure out whether to update an
1321 obsstore and tags information so it can figure out whether to update an
1289 existing Differential Revision, or create a new one.
1322 existing Differential Revision, or create a new one.
1290
1323
1291 If --amend is set, update commit messages so they have the
1324 If --amend is set, update commit messages so they have the
1292 ``Differential Revision`` URL, remove related tags. This is similar to what
1325 ``Differential Revision`` URL, remove related tags. This is similar to what
1293 arcanist will do, and is more desired in author-push workflows. Otherwise,
1326 arcanist will do, and is more desired in author-push workflows. Otherwise,
1294 use local tags to record the ``Differential Revision`` association.
1327 use local tags to record the ``Differential Revision`` association.
1295
1328
1296 The --confirm option lets you confirm changesets before sending them. You
1329 The --confirm option lets you confirm changesets before sending them. You
1297 can also add following to your configuration file to make it default
1330 can also add following to your configuration file to make it default
1298 behaviour::
1331 behaviour::
1299
1332
1300 [phabsend]
1333 [phabsend]
1301 confirm = true
1334 confirm = true
1302
1335
1303 By default, a separate review will be created for each commit that is
1336 By default, a separate review will be created for each commit that is
1304 selected, and will have the same parent/child relationship in Phabricator.
1337 selected, and will have the same parent/child relationship in Phabricator.
1305 If ``--fold`` is set, multiple commits are rolled up into a single review
1338 If ``--fold`` is set, multiple commits are rolled up into a single review
1306 as if diffed from the parent of the first revision to the last. The commit
1339 as if diffed from the parent of the first revision to the last. The commit
1307 messages are concatenated in the summary field on Phabricator.
1340 messages are concatenated in the summary field on Phabricator.
1308
1341
1309 phabsend will check obsstore and the above association to decide whether to
1342 phabsend will check obsstore and the above association to decide whether to
1310 update an existing Differential Revision, or create a new one.
1343 update an existing Differential Revision, or create a new one.
1311 """
1344 """
1312 opts = pycompat.byteskwargs(opts)
1345 opts = pycompat.byteskwargs(opts)
1313 revs = list(revs) + opts.get(b'rev', [])
1346 revs = list(revs) + opts.get(b'rev', [])
1314 revs = scmutil.revrange(repo, revs)
1347 revs = scmutil.revrange(repo, revs)
1315 revs.sort() # ascending order to preserve topological parent/child in phab
1348 revs.sort() # ascending order to preserve topological parent/child in phab
1316
1349
1317 if not revs:
1350 if not revs:
1318 raise error.Abort(_(b'phabsend requires at least one changeset'))
1351 raise error.Abort(_(b'phabsend requires at least one changeset'))
1319 if opts.get(b'amend'):
1352 if opts.get(b'amend'):
1320 cmdutil.checkunfinished(repo)
1353 cmdutil.checkunfinished(repo)
1321
1354
1322 ctxs = [repo[rev] for rev in revs]
1355 ctxs = [repo[rev] for rev in revs]
1323
1356
1324 if any(c for c in ctxs if c.obsolete()):
1357 if any(c for c in ctxs if c.obsolete()):
1325 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1358 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1326
1359
1327 # Ensure the local commits are an unbroken range. The semantics of the
1360 # Ensure the local commits are an unbroken range. The semantics of the
1328 # --fold option implies this, and the auto restacking of orphans requires
1361 # --fold option implies this, and the auto restacking of orphans requires
1329 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1362 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1330 # get A' as a parent.
1363 # get A' as a parent.
1331 def _fail_nonlinear_revs(revs, revtype):
1364 def _fail_nonlinear_revs(revs, revtype):
1332 badnodes = [repo[r].node() for r in revs]
1365 badnodes = [repo[r].node() for r in revs]
1333 raise error.Abort(
1366 raise error.Abort(
1334 _(b"cannot phabsend multiple %s revisions: %s")
1367 _(b"cannot phabsend multiple %s revisions: %s")
1335 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1368 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1336 hint=_(b"the revisions must form a linear chain"),
1369 hint=_(b"the revisions must form a linear chain"),
1337 )
1370 )
1338
1371
1339 heads = repo.revs(b'heads(%ld)', revs)
1372 heads = repo.revs(b'heads(%ld)', revs)
1340 if len(heads) > 1:
1373 if len(heads) > 1:
1341 _fail_nonlinear_revs(heads, b"head")
1374 _fail_nonlinear_revs(heads, b"head")
1342
1375
1343 roots = repo.revs(b'roots(%ld)', revs)
1376 roots = repo.revs(b'roots(%ld)', revs)
1344 if len(roots) > 1:
1377 if len(roots) > 1:
1345 _fail_nonlinear_revs(roots, b"root")
1378 _fail_nonlinear_revs(roots, b"root")
1346
1379
1347 fold = opts.get(b'fold')
1380 fold = opts.get(b'fold')
1348 if fold:
1381 if fold:
1349 if len(revs) == 1:
1382 if len(revs) == 1:
1350 # TODO: just switch to --no-fold instead?
1383 # TODO: just switch to --no-fold instead?
1351 raise error.Abort(_(b"cannot fold a single revision"))
1384 raise error.Abort(_(b"cannot fold a single revision"))
1352
1385
1353 # There's no clear way to manage multiple commits with a Dxxx tag, so
1386 # There's no clear way to manage multiple commits with a Dxxx tag, so
1354 # require the amend option. (We could append "_nnn", but then it
1387 # require the amend option. (We could append "_nnn", but then it
1355 # becomes jumbled if earlier commits are added to an update.) It should
1388 # becomes jumbled if earlier commits are added to an update.) It should
1356 # lock the repo and ensure that the range is editable, but that would
1389 # lock the repo and ensure that the range is editable, but that would
1357 # make the code pretty convoluted. The default behavior of `arc` is to
1390 # make the code pretty convoluted. The default behavior of `arc` is to
1358 # create a new review anyway.
1391 # create a new review anyway.
1359 if not opts.get(b"amend"):
1392 if not opts.get(b"amend"):
1360 raise error.Abort(_(b"cannot fold with --no-amend"))
1393 raise error.Abort(_(b"cannot fold with --no-amend"))
1361
1394
1362 # It might be possible to bucketize the revisions by the DREV value, and
1395 # It might be possible to bucketize the revisions by the DREV value, and
1363 # iterate over those groups when posting, and then again when amending.
1396 # iterate over those groups when posting, and then again when amending.
1364 # But for simplicity, require all selected revisions to be for the same
1397 # But for simplicity, require all selected revisions to be for the same
1365 # DREV (if present). Adding local revisions to an existing DREV is
1398 # DREV (if present). Adding local revisions to an existing DREV is
1366 # acceptable.
1399 # acceptable.
1367 drevmatchers = [
1400 drevmatchers = [
1368 _differentialrevisiondescre.search(ctx.description())
1401 _differentialrevisiondescre.search(ctx.description())
1369 for ctx in ctxs
1402 for ctx in ctxs
1370 ]
1403 ]
1371 if len({m.group('url') for m in drevmatchers if m}) > 1:
1404 if len({m.group('url') for m in drevmatchers if m}) > 1:
1372 raise error.Abort(
1405 raise error.Abort(
1373 _(b"cannot fold revisions with different DREV values")
1406 _(b"cannot fold revisions with different DREV values")
1374 )
1407 )
1375
1408
1376 # {newnode: (oldnode, olddiff, olddrev}
1409 # {newnode: (oldnode, olddiff, olddrev}
1377 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1410 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1378
1411
1379 confirm = ui.configbool(b'phabsend', b'confirm')
1412 confirm = ui.configbool(b'phabsend', b'confirm')
1380 confirm |= bool(opts.get(b'confirm'))
1413 confirm |= bool(opts.get(b'confirm'))
1381 if confirm:
1414 if confirm:
1382 confirmed = _confirmbeforesend(repo, revs, oldmap)
1415 confirmed = _confirmbeforesend(repo, revs, oldmap)
1383 if not confirmed:
1416 if not confirmed:
1384 raise error.Abort(_(b'phabsend cancelled'))
1417 raise error.Abort(_(b'phabsend cancelled'))
1385
1418
1386 actions = []
1419 actions = []
1387 reviewers = opts.get(b'reviewer', [])
1420 reviewers = opts.get(b'reviewer', [])
1388 blockers = opts.get(b'blocker', [])
1421 blockers = opts.get(b'blocker', [])
1389 phids = []
1422 phids = []
1390 if reviewers:
1423 if reviewers:
1391 phids.extend(userphids(repo.ui, reviewers))
1424 phids.extend(userphids(repo.ui, reviewers))
1392 if blockers:
1425 if blockers:
1393 phids.extend(
1426 phids.extend(
1394 map(
1427 map(
1395 lambda phid: b'blocking(%s)' % phid,
1428 lambda phid: b'blocking(%s)' % phid,
1396 userphids(repo.ui, blockers),
1429 userphids(repo.ui, blockers),
1397 )
1430 )
1398 )
1431 )
1399 if phids:
1432 if phids:
1400 actions.append({b'type': b'reviewers.add', b'value': phids})
1433 actions.append({b'type': b'reviewers.add', b'value': phids})
1401
1434
1402 drevids = [] # [int]
1435 drevids = [] # [int]
1403 diffmap = {} # {newnode: diff}
1436 diffmap = {} # {newnode: diff}
1404
1437
1405 # Send patches one by one so we know their Differential Revision PHIDs and
1438 # Send patches one by one so we know their Differential Revision PHIDs and
1406 # can provide dependency relationship
1439 # can provide dependency relationship
1407 lastrevphid = None
1440 lastrevphid = None
1408 for ctx in ctxs:
1441 for ctx in ctxs:
1409 if fold:
1442 if fold:
1410 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1443 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1411 else:
1444 else:
1412 ui.debug(b'sending rev %d\n' % ctx.rev())
1445 ui.debug(b'sending rev %d\n' % ctx.rev())
1413
1446
1414 # Get Differential Revision ID
1447 # Get Differential Revision ID
1415 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1448 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1416 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1449 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1417
1450
1418 if fold:
1451 if fold:
1419 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1452 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1420 ctxs[-1].node(), (None, None, None)
1453 ctxs[-1].node(), (None, None, None)
1421 )
1454 )
1422
1455
1423 if oldnode != ctx.node() or opts.get(b'amend'):
1456 if oldnode != ctx.node() or opts.get(b'amend'):
1424 # Create or update Differential Revision
1457 # Create or update Differential Revision
1425 revision, diff = createdifferentialrevision(
1458 revision, diff = createdifferentialrevision(
1426 ctxs if fold else [ctx],
1459 ctxs if fold else [ctx],
1427 revid,
1460 revid,
1428 lastrevphid,
1461 lastrevphid,
1429 oldbasenode,
1462 oldbasenode,
1430 oldnode,
1463 oldnode,
1431 olddiff,
1464 olddiff,
1432 actions,
1465 actions,
1433 opts.get(b'comment'),
1466 opts.get(b'comment'),
1434 )
1467 )
1435
1468
1436 if fold:
1469 if fold:
1437 for ctx in ctxs:
1470 for ctx in ctxs:
1438 diffmap[ctx.node()] = diff
1471 diffmap[ctx.node()] = diff
1439 else:
1472 else:
1440 diffmap[ctx.node()] = diff
1473 diffmap[ctx.node()] = diff
1441
1474
1442 newrevid = int(revision[b'object'][b'id'])
1475 newrevid = int(revision[b'object'][b'id'])
1443 newrevphid = revision[b'object'][b'phid']
1476 newrevphid = revision[b'object'][b'phid']
1444 if revid:
1477 if revid:
1445 action = b'updated'
1478 action = b'updated'
1446 else:
1479 else:
1447 action = b'created'
1480 action = b'created'
1448
1481
1449 # Create a local tag to note the association, if commit message
1482 # Create a local tag to note the association, if commit message
1450 # does not have it already
1483 # does not have it already
1451 if not fold:
1484 if not fold:
1452 m = _differentialrevisiondescre.search(ctx.description())
1485 m = _differentialrevisiondescre.search(ctx.description())
1453 if not m or int(m.group('id')) != newrevid:
1486 if not m or int(m.group('id')) != newrevid:
1454 tagname = b'D%d' % newrevid
1487 tagname = b'D%d' % newrevid
1455 tags.tag(
1488 tags.tag(
1456 repo,
1489 repo,
1457 tagname,
1490 tagname,
1458 ctx.node(),
1491 ctx.node(),
1459 message=None,
1492 message=None,
1460 user=None,
1493 user=None,
1461 date=None,
1494 date=None,
1462 local=True,
1495 local=True,
1463 )
1496 )
1464 else:
1497 else:
1465 # Nothing changed. But still set "newrevphid" so the next revision
1498 # Nothing changed. But still set "newrevphid" so the next revision
1466 # could depend on this one and "newrevid" for the summary line.
1499 # could depend on this one and "newrevid" for the summary line.
1467 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1500 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1468 newrevid = revid
1501 newrevid = revid
1469 action = b'skipped'
1502 action = b'skipped'
1470
1503
1471 drevids.append(newrevid)
1504 drevids.append(newrevid)
1472 lastrevphid = newrevphid
1505 lastrevphid = newrevphid
1473
1506
1474 if fold:
1507 if fold:
1475 for c in ctxs:
1508 for c in ctxs:
1476 if oldmap.get(c.node(), (None, None, None))[2]:
1509 if oldmap.get(c.node(), (None, None, None))[2]:
1477 action = b'updated'
1510 action = b'updated'
1478 else:
1511 else:
1479 action = b'created'
1512 action = b'created'
1480 _print_phabsend_action(ui, c, newrevid, action)
1513 _print_phabsend_action(ui, c, newrevid, action)
1481 break
1514 break
1482
1515
1483 _print_phabsend_action(ui, ctx, newrevid, action)
1516 _print_phabsend_action(ui, ctx, newrevid, action)
1484
1517
1485 # Update commit messages and remove tags
1518 # Update commit messages and remove tags
1486 if opts.get(b'amend'):
1519 if opts.get(b'amend'):
1487 unfi = repo.unfiltered()
1520 unfi = repo.unfiltered()
1488 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1521 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1489 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1522 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1490 # Eagerly evaluate commits to restabilize before creating new
1523 # Eagerly evaluate commits to restabilize before creating new
1491 # commits. The selected revisions are excluded because they are
1524 # commits. The selected revisions are excluded because they are
1492 # automatically restacked as part of the submission process.
1525 # automatically restacked as part of the submission process.
1493 restack = [
1526 restack = [
1494 c
1527 c
1495 for c in repo.set(
1528 for c in repo.set(
1496 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1529 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1497 revs,
1530 revs,
1498 revs,
1531 revs,
1499 )
1532 )
1500 ]
1533 ]
1501 wnode = unfi[b'.'].node()
1534 wnode = unfi[b'.'].node()
1502 mapping = {} # {oldnode: [newnode]}
1535 mapping = {} # {oldnode: [newnode]}
1503 newnodes = []
1536 newnodes = []
1504
1537
1505 drevid = drevids[0]
1538 drevid = drevids[0]
1506
1539
1507 for i, rev in enumerate(revs):
1540 for i, rev in enumerate(revs):
1508 old = unfi[rev]
1541 old = unfi[rev]
1509 if not fold:
1542 if not fold:
1510 drevid = drevids[i]
1543 drevid = drevids[i]
1511 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1544 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1512
1545
1513 newdesc = get_amended_desc(drev, old, fold)
1546 newdesc = get_amended_desc(drev, old, fold)
1514 # Make sure commit message contain "Differential Revision"
1547 # Make sure commit message contain "Differential Revision"
1515 if (
1548 if (
1516 old.description() != newdesc
1549 old.description() != newdesc
1517 or old.p1().node() in mapping
1550 or old.p1().node() in mapping
1518 or old.p2().node() in mapping
1551 or old.p2().node() in mapping
1519 ):
1552 ):
1520 if old.phase() == phases.public:
1553 if old.phase() == phases.public:
1521 ui.warn(
1554 ui.warn(
1522 _(b"warning: not updating public commit %s\n")
1555 _(b"warning: not updating public commit %s\n")
1523 % scmutil.formatchangeid(old)
1556 % scmutil.formatchangeid(old)
1524 )
1557 )
1525 continue
1558 continue
1526 parents = [
1559 parents = [
1527 mapping.get(old.p1().node(), (old.p1(),))[0],
1560 mapping.get(old.p1().node(), (old.p1(),))[0],
1528 mapping.get(old.p2().node(), (old.p2(),))[0],
1561 mapping.get(old.p2().node(), (old.p2(),))[0],
1529 ]
1562 ]
1530 newdesc = rewriteutil.update_hash_refs(
1563 newdesc = rewriteutil.update_hash_refs(
1531 repo,
1564 repo,
1532 newdesc,
1565 newdesc,
1533 mapping,
1566 mapping,
1534 )
1567 )
1535 new = context.metadataonlyctx(
1568 new = context.metadataonlyctx(
1536 repo,
1569 repo,
1537 old,
1570 old,
1538 parents=parents,
1571 parents=parents,
1539 text=newdesc,
1572 text=newdesc,
1540 user=old.user(),
1573 user=old.user(),
1541 date=old.date(),
1574 date=old.date(),
1542 extra=old.extra(),
1575 extra=old.extra(),
1543 )
1576 )
1544
1577
1545 newnode = new.commit()
1578 newnode = new.commit()
1546
1579
1547 mapping[old.node()] = [newnode]
1580 mapping[old.node()] = [newnode]
1548
1581
1549 if fold:
1582 if fold:
1550 # Defer updating the (single) Diff until all nodes are
1583 # Defer updating the (single) Diff until all nodes are
1551 # collected. No tags were created, so none need to be
1584 # collected. No tags were created, so none need to be
1552 # removed.
1585 # removed.
1553 newnodes.append(newnode)
1586 newnodes.append(newnode)
1554 continue
1587 continue
1555
1588
1556 _amend_diff_properties(
1589 _amend_diff_properties(
1557 unfi, drevid, [newnode], diffmap[old.node()]
1590 unfi, drevid, [newnode], diffmap[old.node()]
1558 )
1591 )
1559
1592
1560 # Remove local tags since it's no longer necessary
1593 # Remove local tags since it's no longer necessary
1561 tagname = b'D%d' % drevid
1594 tagname = b'D%d' % drevid
1562 if tagname in repo.tags():
1595 if tagname in repo.tags():
1563 tags.tag(
1596 tags.tag(
1564 repo,
1597 repo,
1565 tagname,
1598 tagname,
1566 nullid,
1599 nullid,
1567 message=None,
1600 message=None,
1568 user=None,
1601 user=None,
1569 date=None,
1602 date=None,
1570 local=True,
1603 local=True,
1571 )
1604 )
1572 elif fold:
1605 elif fold:
1573 # When folding multiple commits into one review with
1606 # When folding multiple commits into one review with
1574 # --fold, track even the commits that weren't amended, so
1607 # --fold, track even the commits that weren't amended, so
1575 # that their association isn't lost if the properties are
1608 # that their association isn't lost if the properties are
1576 # rewritten below.
1609 # rewritten below.
1577 newnodes.append(old.node())
1610 newnodes.append(old.node())
1578
1611
1579 # If the submitted commits are public, no amend takes place so
1612 # If the submitted commits are public, no amend takes place so
1580 # there are no newnodes and therefore no diff update to do.
1613 # there are no newnodes and therefore no diff update to do.
1581 if fold and newnodes:
1614 if fold and newnodes:
1582 diff = diffmap[old.node()]
1615 diff = diffmap[old.node()]
1583
1616
1584 # The diff object in diffmap doesn't have the local commits
1617 # The diff object in diffmap doesn't have the local commits
1585 # because that could be returned from differential.creatediff,
1618 # because that could be returned from differential.creatediff,
1586 # not differential.querydiffs. So use the queried diff (if
1619 # not differential.querydiffs. So use the queried diff (if
1587 # present), or force the amend (a new revision is being posted.)
1620 # present), or force the amend (a new revision is being posted.)
1588 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1621 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1589 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1622 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1590 _amend_diff_properties(unfi, drevid, newnodes, diff)
1623 _amend_diff_properties(unfi, drevid, newnodes, diff)
1591 else:
1624 else:
1592 _debug(
1625 _debug(
1593 ui,
1626 ui,
1594 b"local commit list for D%d is already up-to-date\n"
1627 b"local commit list for D%d is already up-to-date\n"
1595 % drevid,
1628 % drevid,
1596 )
1629 )
1597 elif fold:
1630 elif fold:
1598 _debug(ui, b"no newnodes to update\n")
1631 _debug(ui, b"no newnodes to update\n")
1599
1632
1600 # Restack any children of first-time submissions that were orphaned
1633 # Restack any children of first-time submissions that were orphaned
1601 # in the process. The ctx won't report that it is an orphan until
1634 # in the process. The ctx won't report that it is an orphan until
1602 # the cleanup takes place below.
1635 # the cleanup takes place below.
1603 for old in restack:
1636 for old in restack:
1604 parents = [
1637 parents = [
1605 mapping.get(old.p1().node(), (old.p1(),))[0],
1638 mapping.get(old.p1().node(), (old.p1(),))[0],
1606 mapping.get(old.p2().node(), (old.p2(),))[0],
1639 mapping.get(old.p2().node(), (old.p2(),))[0],
1607 ]
1640 ]
1608 new = context.metadataonlyctx(
1641 new = context.metadataonlyctx(
1609 repo,
1642 repo,
1610 old,
1643 old,
1611 parents=parents,
1644 parents=parents,
1612 text=rewriteutil.update_hash_refs(
1645 text=rewriteutil.update_hash_refs(
1613 repo, old.description(), mapping
1646 repo, old.description(), mapping
1614 ),
1647 ),
1615 user=old.user(),
1648 user=old.user(),
1616 date=old.date(),
1649 date=old.date(),
1617 extra=old.extra(),
1650 extra=old.extra(),
1618 )
1651 )
1619
1652
1620 newnode = new.commit()
1653 newnode = new.commit()
1621
1654
1622 # Don't obsolete unselected descendants of nodes that have not
1655 # Don't obsolete unselected descendants of nodes that have not
1623 # been changed in this transaction- that results in an error.
1656 # been changed in this transaction- that results in an error.
1624 if newnode != old.node():
1657 if newnode != old.node():
1625 mapping[old.node()] = [newnode]
1658 mapping[old.node()] = [newnode]
1626 _debug(
1659 _debug(
1627 ui,
1660 ui,
1628 b"restabilizing %s as %s\n"
1661 b"restabilizing %s as %s\n"
1629 % (short(old.node()), short(newnode)),
1662 % (short(old.node()), short(newnode)),
1630 )
1663 )
1631 else:
1664 else:
1632 _debug(
1665 _debug(
1633 ui,
1666 ui,
1634 b"not restabilizing unchanged %s\n" % short(old.node()),
1667 b"not restabilizing unchanged %s\n" % short(old.node()),
1635 )
1668 )
1636
1669
1637 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1670 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1638 if wnode in mapping:
1671 if wnode in mapping:
1639 unfi.setparents(mapping[wnode][0])
1672 unfi.setparents(mapping[wnode][0])
1640
1673
1641
1674
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): trailing space after 'Parent' is presumably there to
        # match `hg export` header alignment — confirm before changing.
        (b'parent', b'Parent '),
    ]
)
1653
1686
1654
1687
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per changeset and ask before submitting.

    Returns True when the user confirms the send, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # The third slot of the oldmap tuple holds the Differential
        # Revision id when this changeset was posted before.
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            label = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            label = ui.label(_(b'NEW'), b'phabricator.drev')
        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (label, summary))

    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    # promptchoice() returns the chosen index; 0 is "Yes".
    return ui.promptchoice(prompt) == 0
1680
1713
1681
1714
# Normalized status names (see _getstatusname) that the DREVSPEC query
# language accepts as bare symbols (handled in querydrev's walk()).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1690
1723
1691
1724
1692 def _getstatusname(drev):
1725 def _getstatusname(drev):
1693 """get normalized status name from a Differential Revision"""
1726 """get normalized status name from a Differential Revision"""
1694 return drev[b'statusName'].replace(b' ', b'').lower()
1727 return drev[b'statusName'].replace(b' ', b'').lower()
1695
1728
1696
1729
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# Grammar table consumed by parser.parser() in _parse(); each entry is
# token-type: (binding-strength, primary, prefix, infix, suffix).

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1711
1744
1712
1745
def _tokenize(text):
    """Yield (token-type, value, position) triples for a DREVSPEC string.

    Runs of ordinary bytes become (b'symbol', bytes, pos) tokens; each
    special character is emitted as its own token (blanks are skipped);
    a trailing (b'end', None, pos) token terminates the stream.
    """
    special = b'():+-& '
    view = memoryview(text)  # allows slicing without copying
    length = len(text)
    pos = 0
    while pos < length:
        # Greedily take the longest run of non-special characters.
        run = itertools.takewhile(
            lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
        )
        symbol = b''.join(run)
        if not symbol:
            # Special character; blanks are dropped entirely.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
        else:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
    yield (b'end', None, pos)
1732
1765
1733
1766
def _parse(text):
    """Parse a DREVSPEC string into an AST tuple.

    Raises error.ParseError when the parser does not consume the whole
    input (i.e. there is a trailing invalid token).
    """
    p = parser.parser(_elements)
    tree, consumed = p.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1739
1772
1740
1773
1741 def _parsedrev(symbol):
1774 def _parsedrev(symbol):
1742 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1775 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1743 if symbol.startswith(b'D') and symbol[1:].isdigit():
1776 if symbol.startswith(b'D') and symbol[1:].isdigit():
1744 return int(symbol[1:])
1777 return int(symbol[1:])
1745 if symbol.isdigit():
1778 if symbol.isdigit():
1746 return int(symbol)
1779 return int(symbol)
1747
1780
1748
1781
1749 def _prefetchdrevs(tree):
1782 def _prefetchdrevs(tree):
1750 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1783 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1751 drevs = set()
1784 drevs = set()
1752 ancestordrevs = set()
1785 ancestordrevs = set()
1753 op = tree[0]
1786 op = tree[0]
1754 if op == b'symbol':
1787 if op == b'symbol':
1755 r = _parsedrev(tree[1])
1788 r = _parsedrev(tree[1])
1756 if r:
1789 if r:
1757 drevs.add(r)
1790 drevs.add(r)
1758 elif op == b'ancestors':
1791 elif op == b'ancestors':
1759 r, a = _prefetchdrevs(tree[1])
1792 r, a = _prefetchdrevs(tree[1])
1760 drevs.update(r)
1793 drevs.update(r)
1761 ancestordrevs.update(r)
1794 ancestordrevs.update(r)
1762 ancestordrevs.update(a)
1795 ancestordrevs.update(a)
1763 else:
1796 else:
1764 for t in tree[1:]:
1797 for t in tree[1:]:
1765 r, a = _prefetchdrevs(t)
1798 r, a = _prefetchdrevs(t)
1766 drevs.update(r)
1799 drevs.update(r)
1767 ancestordrevs.update(a)
1800 ancestordrevs.update(a)
1768 return drevs, ancestordrevs
1801 return drevs, ancestordrevs
1769
1802
1770
1803
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Memoized through `prefetched`: any drev previously returned by a
        # differential.query call is reusable by either id or phid key.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Follows "phabricator:depends-on" edges depth-first from each top
        # revision, then reverses so the bottom of the stack comes first.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For each id whose ancestors
    # are requested, speculatively fetch the `batchsize` ids just below it
    # in one conduit call, on the bet that the stack lives in that window.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                # A literal revision id like b'D23' or b'23'.
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name filters the prefetched revisions.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set algebra delegated to the smartset operators of the same name.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1896
1929
1897
1930
def getdescfromdrev(drev):
    """Build a commit message from a "Differential Revision" dict.

    This is similar to the differential.getcommitmessage API, but only the
    title, summary, test plan and URL fields are used.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a missing summary) are dropped before joining.
    return b'\n\n'.join(s for s in sections if s)
1911
1944
1912
1945
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    Used when deciding whether an individual commit message needs amending
    after posting for review; the decision is made per commit even when
    several commits were folded into a single review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    desc = ctx.description()

    # With --fold the commit messages were combined on the Phabricator side,
    # so the fields cannot be read back here or every affected local
    # revision would end up with the same message once the URI is amended
    # in. Instead, only the DREV line is appended (or replaced in place if
    # one already exists). At worst, message/test-plan edits made on
    # Phabricator are not propagated back, which seems reasonable when
    # local commits are effectively combined in Phabricator.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)
    return b'\n\n'.join([desc, uri])
1938
1971
1939
1972
def getlocalcommits(diff):
    """get the set of local commit nodes recorded on a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    commits = (diff.get(b'properties') or {}).get(b'local:commits') or {}
    if len(commits) > 1:
        return {bin(node) for node in commits}

    # Storing the diff metadata predates storing `local:commits`, so keep
    # using it in the --no-fold (single commit) case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1953
1986
1954
1987
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    Prefers the "hg:meta" property, written by phabsend, e.g.:

        "properties": {
            "hg:meta": {
                "branch": "default",
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Otherwise converts "local:commits", written by "arc", e.g.:

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "authorEmail": "foo@example.com"
                    "branch": "default",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "local": "1000",
                    "message": "...",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "summary": "...",
                    "tag": "",
                    "time": 1499546314,
                }
            }
        }

    Note: metadata extracted from "local:commits" loses time zone
    information. Fields still missing afterwards are backfilled from the
    top-level diff object (dateCreated, branch, sourceControlBaseRevision).
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        local = props.get(b'local:commits')
        if local:
            # NOTE(review): sorting dict values only works on py3 when there
            # is a single commit entry — presumably guaranteed by callers.
            commit = sorted(local.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]
    # Backfill anything still missing from the top-level diff fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2022
2055
2023
2056
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:
        wrapped = []
        for s in specs:
            if stack:
                # ':(spec)' additionally selects every ancestor revision.
                s = b':(%s)' % s
            wrapped.append(b'(%s)' % s)
        # Union of all the individual (parenthesized) specs.
        drevs = querydrev(ui, b'+'.join(wrapped))
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
2043
2076
2044
2077
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch the hg:meta property of every latest diff in a single
    # conduit round-trip.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Reconstruct hg patch headers from the hg:meta property so that
        # metadata survives a round-trip through "hg import". See
        # patchheadermap and extract in mercurial/patch.py for the headers
        # the import machinery understands.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        lines = [b'# HG changeset patch\n']
        for key in _metanamemap:
            if key in meta:
                lines.append(b'# %s %s\n' % (_metanamemap[key], meta[key]))
        header = b''.join(lines)

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand every generated patch to the supplied callback at once.
    write(patches)
2081
2114
2082
2115
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # The drev id is unused here; phabread only streams the patch text.
        for _drev, text in patches:
            ui.write(text)

    readpatch(ui, drevs, _emit)
2119
2152
2120
2153
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects. The remaining values are mandatory defaults, synced with
    # commands.import.
    opts[b'bypass'] = True
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _apply(patches):
        # Start from the working directory parent and stack each imported
        # changeset on top of the previously imported one.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, text in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(text)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
2180
2213
2181
2214
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Transaction types understood by differential.revision.edit; each maps
    # to a boolean command line flag of the same (dash-separated) name.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The comment, if any, is only attached to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
2237
2270
2238
2271
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # No "Differential Revision" line in the description: fall back to any
    # local tag that looks like a Differential Revision identifier.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        url += tag

        return templateutil.hybriddict(
            {
                b'url': url,
                b'id': tag,
            }
        )
    return None
2269
2302
2270
2303
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    drevmap = getdrevmap(repo, [rev])
    if rev not in drevmap:
        # No Differential Revision is associated with this changeset.
        return None
    drevid = drevmap[rev]
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {
                    b'url': drev[b'uri'],
                    b'status': drev[b'statusName'],
                }
            )
    return None
2293
2326
2294
2327
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition revisions into those with a known Differential Revision id
    # and those without one; remember which revs map to each drev id.
    norev, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            norev.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # Fetch all differentials in one conduit call, then index them by rev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revisions we could resolve to a differential.
    revs -= smartset.baseset(norev)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now