##// END OF EJS Templates
typing: disable a bogus attribute-error warning in phabricator...
Matt Harbison -
r50758:92743e6d default
parent child Browse files
Show More
@@ -1,2398 +1,2401 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 # retry failed command N time (default 0). Useful when using the extension
38 # retry failed command N time (default 0). Useful when using the extension
39 # over flakly connection.
39 # over flakly connection.
40 #
40 #
41 # We wait `retry.interval` between each retry, in seconds.
41 # We wait `retry.interval` between each retry, in seconds.
42 # (default 1 second).
42 # (default 1 second).
43 retry = 3
43 retry = 3
44 retry.interval = 10
44 retry.interval = 10
45
45
46 # the retry option can combine well with the http.timeout one.
46 # the retry option can combine well with the http.timeout one.
47 #
47 #
48 # For example to give up on http request after 20 seconds:
48 # For example to give up on http request after 20 seconds:
49 [http]
49 [http]
50 timeout=20
50 timeout=20
51
51
52 [auth]
52 [auth]
53 example.schemes = https
53 example.schemes = https
54 example.prefix = phab.example.com
54 example.prefix = phab.example.com
55
55
56 # API token. Get it from https://$HOST/conduit/login/
56 # API token. Get it from https://$HOST/conduit/login/
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
58 """
58 """
59
59
60
60
61 import base64
61 import base64
62 import contextlib
62 import contextlib
63 import hashlib
63 import hashlib
64 import io
64 import io
65 import itertools
65 import itertools
66 import json
66 import json
67 import mimetypes
67 import mimetypes
68 import operator
68 import operator
69 import re
69 import re
70 import time
70 import time
71
71
72 from mercurial.node import bin, short
72 from mercurial.node import bin, short
73 from mercurial.i18n import _
73 from mercurial.i18n import _
74 from mercurial.pycompat import getattr
74 from mercurial.pycompat import getattr
75 from mercurial.thirdparty import attr
75 from mercurial.thirdparty import attr
76 from mercurial import (
76 from mercurial import (
77 cmdutil,
77 cmdutil,
78 context,
78 context,
79 copies,
79 copies,
80 encoding,
80 encoding,
81 error,
81 error,
82 exthelper,
82 exthelper,
83 graphmod,
83 graphmod,
84 httpconnection as httpconnectionmod,
84 httpconnection as httpconnectionmod,
85 localrepo,
85 localrepo,
86 logcmdutil,
86 logcmdutil,
87 match,
87 match,
88 mdiff,
88 mdiff,
89 obsutil,
89 obsutil,
90 parser,
90 parser,
91 patch,
91 patch,
92 phases,
92 phases,
93 pycompat,
93 pycompat,
94 rewriteutil,
94 rewriteutil,
95 scmutil,
95 scmutil,
96 smartset,
96 smartset,
97 tags,
97 tags,
98 templatefilters,
98 templatefilters,
99 templateutil,
99 templateutil,
100 url as urlmod,
100 url as urlmod,
101 util,
101 util,
102 )
102 )
103 from mercurial.utils import (
103 from mercurial.utils import (
104 procutil,
104 procutil,
105 stringutil,
105 stringutil,
106 urlutil,
106 urlutil,
107 )
107 )
108 from . import show
108 from . import show
109
109
110
110
111 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
111 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
112 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
112 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
113 # be specifying the version(s) of Mercurial they are tested with, or
113 # be specifying the version(s) of Mercurial they are tested with, or
114 # leave the attribute unspecified.
114 # leave the attribute unspecified.
115 testedwith = b'ships-with-hg-core'
115 testedwith = b'ships-with-hg-core'
116
116
117 eh = exthelper.exthelper()
117 eh = exthelper.exthelper()
118
118
119 cmdtable = eh.cmdtable
119 cmdtable = eh.cmdtable
120 command = eh.command
120 command = eh.command
121 configtable = eh.configtable
121 configtable = eh.configtable
122 templatekeyword = eh.templatekeyword
122 templatekeyword = eh.templatekeyword
123 uisetup = eh.finaluisetup
123 uisetup = eh.finaluisetup
124
124
125 # developer config: phabricator.batchsize
125 # developer config: phabricator.batchsize
126 eh.configitem(
126 eh.configitem(
127 b'phabricator',
127 b'phabricator',
128 b'batchsize',
128 b'batchsize',
129 default=12,
129 default=12,
130 )
130 )
131 eh.configitem(
131 eh.configitem(
132 b'phabricator',
132 b'phabricator',
133 b'callsign',
133 b'callsign',
134 default=None,
134 default=None,
135 )
135 )
136 eh.configitem(
136 eh.configitem(
137 b'phabricator',
137 b'phabricator',
138 b'curlcmd',
138 b'curlcmd',
139 default=None,
139 default=None,
140 )
140 )
141 # developer config: phabricator.debug
141 # developer config: phabricator.debug
142 eh.configitem(
142 eh.configitem(
143 b'phabricator',
143 b'phabricator',
144 b'debug',
144 b'debug',
145 default=False,
145 default=False,
146 )
146 )
147 # developer config: phabricator.repophid
147 # developer config: phabricator.repophid
148 eh.configitem(
148 eh.configitem(
149 b'phabricator',
149 b'phabricator',
150 b'repophid',
150 b'repophid',
151 default=None,
151 default=None,
152 )
152 )
153 eh.configitem(
153 eh.configitem(
154 b'phabricator',
154 b'phabricator',
155 b'retry',
155 b'retry',
156 default=0,
156 default=0,
157 )
157 )
158 eh.configitem(
158 eh.configitem(
159 b'phabricator',
159 b'phabricator',
160 b'retry.interval',
160 b'retry.interval',
161 default=1,
161 default=1,
162 )
162 )
163 eh.configitem(
163 eh.configitem(
164 b'phabricator',
164 b'phabricator',
165 b'url',
165 b'url',
166 default=None,
166 default=None,
167 )
167 )
168 eh.configitem(
168 eh.configitem(
169 b'phabsend',
169 b'phabsend',
170 b'confirm',
170 b'confirm',
171 default=False,
171 default=False,
172 )
172 )
173 eh.configitem(
173 eh.configitem(
174 b'phabimport',
174 b'phabimport',
175 b'secret',
175 b'secret',
176 default=False,
176 default=False,
177 )
177 )
178 eh.configitem(
178 eh.configitem(
179 b'phabimport',
179 b'phabimport',
180 b'obsolete',
180 b'obsolete',
181 default=False,
181 default=False,
182 )
182 )
183
183
184 colortable = {
184 colortable = {
185 b'phabricator.action.created': b'green',
185 b'phabricator.action.created': b'green',
186 b'phabricator.action.skipped': b'magenta',
186 b'phabricator.action.skipped': b'magenta',
187 b'phabricator.action.updated': b'magenta',
187 b'phabricator.action.updated': b'magenta',
188 b'phabricator.drev': b'bold',
188 b'phabricator.drev': b'bold',
189 b'phabricator.status.abandoned': b'magenta dim',
189 b'phabricator.status.abandoned': b'magenta dim',
190 b'phabricator.status.accepted': b'green bold',
190 b'phabricator.status.accepted': b'green bold',
191 b'phabricator.status.closed': b'green',
191 b'phabricator.status.closed': b'green',
192 b'phabricator.status.needsreview': b'yellow',
192 b'phabricator.status.needsreview': b'yellow',
193 b'phabricator.status.needsrevision': b'red',
193 b'phabricator.status.needsrevision': b'red',
194 b'phabricator.status.changesplanned': b'red',
194 b'phabricator.status.changesplanned': b'red',
195 }
195 }
196
196
197 _VCR_FLAGS = [
197 _VCR_FLAGS = [
198 (
198 (
199 b'',
199 b'',
200 b'test-vcr',
200 b'test-vcr',
201 b'',
201 b'',
202 _(
202 _(
203 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
203 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
204 b', otherwise will mock all http requests using the specified vcr file.'
204 b', otherwise will mock all http requests using the specified vcr file.'
205 b' (ADVANCED)'
205 b' (ADVANCED)'
206 ),
206 ),
207 ),
207 ),
208 ]
208 ]
209
209
210
210
211 @eh.wrapfunction(localrepo, "loadhgrc")
211 @eh.wrapfunction(localrepo, "loadhgrc")
212 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
212 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
213 """Load ``.arcconfig`` content into a ui instance on repository open."""
213 """Load ``.arcconfig`` content into a ui instance on repository open."""
214 result = False
214 result = False
215 arcconfig = {}
215 arcconfig = {}
216
216
217 try:
217 try:
218 # json.loads only accepts bytes from 3.6+
218 # json.loads only accepts bytes from 3.6+
219 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
219 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
220 # json.loads only returns unicode strings
220 # json.loads only returns unicode strings
221 arcconfig = pycompat.rapply(
221 arcconfig = pycompat.rapply(
222 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
222 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
223 pycompat.json_loads(rawparams),
223 pycompat.json_loads(rawparams),
224 )
224 )
225
225
226 result = True
226 result = True
227 except ValueError:
227 except ValueError:
228 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
228 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
229 except IOError:
229 except IOError:
230 pass
230 pass
231
231
232 cfg = util.sortdict()
232 cfg = util.sortdict()
233
233
234 if b"repository.callsign" in arcconfig:
234 if b"repository.callsign" in arcconfig:
235 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
235 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
236
236
237 if b"phabricator.uri" in arcconfig:
237 if b"phabricator.uri" in arcconfig:
238 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
238 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
239
239
240 if cfg:
240 if cfg:
241 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
241 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
242
242
243 return (
243 return (
244 orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or result
244 orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or result
245 ) # Load .hg/hgrc
245 ) # Load .hg/hgrc
246
246
247
247
248 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
248 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
249 fullflags = flags + _VCR_FLAGS
249 fullflags = flags + _VCR_FLAGS
250
250
251 def hgmatcher(r1, r2):
251 def hgmatcher(r1, r2):
252 if r1.uri != r2.uri or r1.method != r2.method:
252 if r1.uri != r2.uri or r1.method != r2.method:
253 return False
253 return False
254 r1params = util.urlreq.parseqs(r1.body)
254 r1params = util.urlreq.parseqs(r1.body)
255 r2params = util.urlreq.parseqs(r2.body)
255 r2params = util.urlreq.parseqs(r2.body)
256 for key in r1params:
256 for key in r1params:
257 if key not in r2params:
257 if key not in r2params:
258 return False
258 return False
259 value = r1params[key][0]
259 value = r1params[key][0]
260 # we want to compare json payloads without worrying about ordering
260 # we want to compare json payloads without worrying about ordering
261 if value.startswith(b'{') and value.endswith(b'}'):
261 if value.startswith(b'{') and value.endswith(b'}'):
262 r1json = pycompat.json_loads(value)
262 r1json = pycompat.json_loads(value)
263 r2json = pycompat.json_loads(r2params[key][0])
263 r2json = pycompat.json_loads(r2params[key][0])
264 if r1json != r2json:
264 if r1json != r2json:
265 return False
265 return False
266 elif r2params[key][0] != value:
266 elif r2params[key][0] != value:
267 return False
267 return False
268 return True
268 return True
269
269
270 def sanitiserequest(request):
270 def sanitiserequest(request):
271 request.body = re.sub(
271 request.body = re.sub(
272 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
272 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
273 )
273 )
274 return request
274 return request
275
275
276 def sanitiseresponse(response):
276 def sanitiseresponse(response):
277 if 'set-cookie' in response['headers']:
277 if 'set-cookie' in response['headers']:
278 del response['headers']['set-cookie']
278 del response['headers']['set-cookie']
279 return response
279 return response
280
280
281 def decorate(fn):
281 def decorate(fn):
282 def inner(*args, **kwargs):
282 def inner(*args, **kwargs):
283 vcr = kwargs.pop('test_vcr')
283 vcr = kwargs.pop('test_vcr')
284 if vcr:
284 if vcr:
285 cassette = pycompat.fsdecode(vcr)
285 cassette = pycompat.fsdecode(vcr)
286 import hgdemandimport
286 import hgdemandimport
287
287
288 with hgdemandimport.deactivated():
288 with hgdemandimport.deactivated():
289 # pytype: disable=import-error
289 # pytype: disable=import-error
290 import vcr as vcrmod
290 import vcr as vcrmod
291 import vcr.stubs as stubs
291 import vcr.stubs as stubs
292
292
293 # pytype: enable=import-error
293 # pytype: enable=import-error
294
294
295 vcr = vcrmod.VCR(
295 vcr = vcrmod.VCR(
296 serializer='json',
296 serializer='json',
297 before_record_request=sanitiserequest,
297 before_record_request=sanitiserequest,
298 before_record_response=sanitiseresponse,
298 before_record_response=sanitiseresponse,
299 custom_patches=[
299 custom_patches=[
300 (
300 (
301 urlmod,
301 urlmod,
302 'httpconnection',
302 'httpconnection',
303 stubs.VCRHTTPConnection,
303 stubs.VCRHTTPConnection,
304 ),
304 ),
305 (
305 (
306 urlmod,
306 urlmod,
307 'httpsconnection',
307 'httpsconnection',
308 stubs.VCRHTTPSConnection,
308 stubs.VCRHTTPSConnection,
309 ),
309 ),
310 ],
310 ],
311 )
311 )
312 vcr.register_matcher('hgmatcher', hgmatcher)
312 vcr.register_matcher('hgmatcher', hgmatcher)
313 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
313 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
314 return fn(*args, **kwargs)
314 return fn(*args, **kwargs)
315 return fn(*args, **kwargs)
315 return fn(*args, **kwargs)
316
316
317 cmd = util.checksignature(inner, depth=2)
317 cmd = util.checksignature(inner, depth=2)
318 cmd.__name__ = fn.__name__
318 cmd.__name__ = fn.__name__
319 cmd.__doc__ = fn.__doc__
319 cmd.__doc__ = fn.__doc__
320
320
321 return command(
321 return command(
322 name,
322 name,
323 fullflags,
323 fullflags,
324 spec,
324 spec,
325 helpcategory=helpcategory,
325 helpcategory=helpcategory,
326 optionalrepo=optionalrepo,
326 optionalrepo=optionalrepo,
327 )(cmd)
327 )(cmd)
328
328
329 return decorate
329 return decorate
330
330
331
331
332 def _debug(ui, *msg, **opts):
332 def _debug(ui, *msg, **opts):
333 """write debug output for Phabricator if ``phabricator.debug`` is set
333 """write debug output for Phabricator if ``phabricator.debug`` is set
334
334
335 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
335 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
336 printed with the --debug argument.
336 printed with the --debug argument.
337 """
337 """
338 if ui.configbool(b"phabricator", b"debug"):
338 if ui.configbool(b"phabricator", b"debug"):
339 flag = ui.debugflag
339 flag = ui.debugflag
340 try:
340 try:
341 ui.debugflag = True
341 ui.debugflag = True
342 ui.write(*msg, **opts)
342 ui.write(*msg, **opts)
343 finally:
343 finally:
344 ui.debugflag = flag
344 ui.debugflag = flag
345
345
346
346
347 def urlencodenested(params):
347 def urlencodenested(params):
348 """like urlencode, but works with nested parameters.
348 """like urlencode, but works with nested parameters.
349
349
350 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
350 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
351 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
351 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
352 urlencode. Note: the encoding is consistent with PHP's http_build_query.
352 urlencode. Note: the encoding is consistent with PHP's http_build_query.
353 """
353 """
354 flatparams = util.sortdict()
354 flatparams = util.sortdict()
355
355
356 def process(prefix, obj):
356 def process(prefix: bytes, obj):
357 if isinstance(obj, bool):
357 if isinstance(obj, bool):
358 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
358 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
359 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
359 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
360 # .items() will only be called for a dict type
361 # pytype: disable=attribute-error
360 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
362 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
363 # pytype: enable=attribute-error
361 if items is None:
364 if items is None:
362 flatparams[prefix] = obj
365 flatparams[prefix] = obj
363 else:
366 else:
364 for k, v in items(obj):
367 for k, v in items(obj):
365 if prefix:
368 if prefix:
366 process(b'%s[%s]' % (prefix, k), v)
369 process(b'%s[%s]' % (prefix, k), v)
367 else:
370 else:
368 process(k, v)
371 process(k, v)
369
372
370 process(b'', params)
373 process(b'', params)
371 return urlutil.urlreq.urlencode(flatparams)
374 return urlutil.urlreq.urlencode(flatparams)
372
375
373
376
374 def readurltoken(ui):
377 def readurltoken(ui):
375 """return conduit url, token and make sure they exist
378 """return conduit url, token and make sure they exist
376
379
377 Currently read from [auth] config section. In the future, it might
380 Currently read from [auth] config section. In the future, it might
378 make sense to read from .arcconfig and .arcrc as well.
381 make sense to read from .arcconfig and .arcrc as well.
379 """
382 """
380 url = ui.config(b'phabricator', b'url')
383 url = ui.config(b'phabricator', b'url')
381 if not url:
384 if not url:
382 raise error.Abort(
385 raise error.Abort(
383 _(b'config %s.%s is required') % (b'phabricator', b'url')
386 _(b'config %s.%s is required') % (b'phabricator', b'url')
384 )
387 )
385
388
386 res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user)
389 res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user)
387 token = None
390 token = None
388
391
389 if res:
392 if res:
390 group, auth = res
393 group, auth = res
391
394
392 ui.debug(b"using auth.%s.* for authentication\n" % group)
395 ui.debug(b"using auth.%s.* for authentication\n" % group)
393
396
394 token = auth.get(b'phabtoken')
397 token = auth.get(b'phabtoken')
395
398
396 if not token:
399 if not token:
397 raise error.Abort(
400 raise error.Abort(
398 _(b'Can\'t find conduit token associated to %s') % (url,)
401 _(b'Can\'t find conduit token associated to %s') % (url,)
399 )
402 )
400
403
401 return url, token
404 return url, token
402
405
403
406
404 def callconduit(ui, name, params):
407 def callconduit(ui, name, params):
405 """call Conduit API, params is a dict. return json.loads result, or None"""
408 """call Conduit API, params is a dict. return json.loads result, or None"""
406 host, token = readurltoken(ui)
409 host, token = readurltoken(ui)
407 url, authinfo = urlutil.url(b'/'.join([host, b'api', name])).authinfo()
410 url, authinfo = urlutil.url(b'/'.join([host, b'api', name])).authinfo()
408 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
411 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
409 params = params.copy()
412 params = params.copy()
410 params[b'__conduit__'] = {
413 params[b'__conduit__'] = {
411 b'token': token,
414 b'token': token,
412 }
415 }
413 rawdata = {
416 rawdata = {
414 b'params': templatefilters.json(params),
417 b'params': templatefilters.json(params),
415 b'output': b'json',
418 b'output': b'json',
416 b'__conduit__': 1,
419 b'__conduit__': 1,
417 }
420 }
418 data = urlencodenested(rawdata)
421 data = urlencodenested(rawdata)
419 curlcmd = ui.config(b'phabricator', b'curlcmd')
422 curlcmd = ui.config(b'phabricator', b'curlcmd')
420 if curlcmd:
423 if curlcmd:
421 sin, sout = procutil.popen2(
424 sin, sout = procutil.popen2(
422 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
425 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
423 )
426 )
424 sin.write(data)
427 sin.write(data)
425 sin.close()
428 sin.close()
426 body = sout.read()
429 body = sout.read()
427 else:
430 else:
428 urlopener = urlmod.opener(ui, authinfo)
431 urlopener = urlmod.opener(ui, authinfo)
429 request = util.urlreq.request(pycompat.strurl(url), data=data)
432 request = util.urlreq.request(pycompat.strurl(url), data=data)
430 max_try = ui.configint(b'phabricator', b'retry') + 1
433 max_try = ui.configint(b'phabricator', b'retry') + 1
431 timeout = ui.configwith(float, b'http', b'timeout')
434 timeout = ui.configwith(float, b'http', b'timeout')
432 for try_count in range(max_try):
435 for try_count in range(max_try):
433 try:
436 try:
434 with contextlib.closing(
437 with contextlib.closing(
435 urlopener.open(request, timeout=timeout)
438 urlopener.open(request, timeout=timeout)
436 ) as rsp:
439 ) as rsp:
437 body = rsp.read()
440 body = rsp.read()
438 break
441 break
439 except util.urlerr.urlerror as err:
442 except util.urlerr.urlerror as err:
440 if try_count == max_try - 1:
443 if try_count == max_try - 1:
441 raise
444 raise
442 ui.debug(
445 ui.debug(
443 b'Conduit Request failed (try %d/%d): %r\n'
446 b'Conduit Request failed (try %d/%d): %r\n'
444 % (try_count + 1, max_try, err)
447 % (try_count + 1, max_try, err)
445 )
448 )
446 # failing request might come from overloaded server
449 # failing request might come from overloaded server
447 retry_interval = ui.configint(b'phabricator', b'retry.interval')
450 retry_interval = ui.configint(b'phabricator', b'retry.interval')
448 time.sleep(retry_interval)
451 time.sleep(retry_interval)
449 ui.debug(b'Conduit Response: %s\n' % body)
452 ui.debug(b'Conduit Response: %s\n' % body)
450 parsed = pycompat.rapply(
453 parsed = pycompat.rapply(
451 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
454 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
452 # json.loads only accepts bytes from py3.6+
455 # json.loads only accepts bytes from py3.6+
453 pycompat.json_loads(encoding.unifromlocal(body)),
456 pycompat.json_loads(encoding.unifromlocal(body)),
454 )
457 )
455 if parsed.get(b'error_code'):
458 if parsed.get(b'error_code'):
456 msg = _(b'Conduit Error (%s): %s') % (
459 msg = _(b'Conduit Error (%s): %s') % (
457 parsed[b'error_code'],
460 parsed[b'error_code'],
458 parsed[b'error_info'],
461 parsed[b'error_info'],
459 )
462 )
460 raise error.Abort(msg)
463 raise error.Abort(msg)
461 return parsed[b'result']
464 return parsed[b'result']
462
465
463
466
464 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
467 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
465 def debugcallconduit(ui, repo, name):
468 def debugcallconduit(ui, repo, name):
466 """call Conduit API
469 """call Conduit API
467
470
468 Call parameters are read from stdin as a JSON blob. Result will be written
471 Call parameters are read from stdin as a JSON blob. Result will be written
469 to stdout as a JSON blob.
472 to stdout as a JSON blob.
470 """
473 """
471 # json.loads only accepts bytes from 3.6+
474 # json.loads only accepts bytes from 3.6+
472 rawparams = encoding.unifromlocal(ui.fin.read())
475 rawparams = encoding.unifromlocal(ui.fin.read())
473 # json.loads only returns unicode strings
476 # json.loads only returns unicode strings
474 params = pycompat.rapply(
477 params = pycompat.rapply(
475 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
478 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
476 pycompat.json_loads(rawparams),
479 pycompat.json_loads(rawparams),
477 )
480 )
478 # json.dumps only accepts unicode strings
481 # json.dumps only accepts unicode strings
479 result = pycompat.rapply(
482 result = pycompat.rapply(
480 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
483 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
481 callconduit(ui, name, params),
484 callconduit(ui, name, params),
482 )
485 )
483 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
486 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
484 ui.write(b'%s\n' % encoding.unitolocal(s))
487 ui.write(b'%s\n' % encoding.unitolocal(s))
485
488
486
489
487 def getrepophid(repo):
490 def getrepophid(repo):
488 """given callsign, return repository PHID or None"""
491 """given callsign, return repository PHID or None"""
489 # developer config: phabricator.repophid
492 # developer config: phabricator.repophid
490 repophid = repo.ui.config(b'phabricator', b'repophid')
493 repophid = repo.ui.config(b'phabricator', b'repophid')
491 if repophid:
494 if repophid:
492 return repophid
495 return repophid
493 callsign = repo.ui.config(b'phabricator', b'callsign')
496 callsign = repo.ui.config(b'phabricator', b'callsign')
494 if not callsign:
497 if not callsign:
495 return None
498 return None
496 query = callconduit(
499 query = callconduit(
497 repo.ui,
500 repo.ui,
498 b'diffusion.repository.search',
501 b'diffusion.repository.search',
499 {b'constraints': {b'callsigns': [callsign]}},
502 {b'constraints': {b'callsigns': [callsign]}},
500 )
503 )
501 if len(query[b'data']) == 0:
504 if len(query[b'data']) == 0:
502 return None
505 return None
503 repophid = query[b'data'][0][b'phid']
506 repophid = query[b'data'][0][b'phid']
504 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
507 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
505 return repophid
508 return repophid
506
509
507
510
508 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
511 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
509 _differentialrevisiondescre = re.compile(
512 _differentialrevisiondescre = re.compile(
510 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
513 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
511 )
514 )
512
515
513
516
514 def getoldnodedrevmap(repo, nodelist):
517 def getoldnodedrevmap(repo, nodelist):
515 """find previous nodes that has been sent to Phabricator
518 """find previous nodes that has been sent to Phabricator
516
519
517 return {node: (oldnode, Differential diff, Differential Revision ID)}
520 return {node: (oldnode, Differential diff, Differential Revision ID)}
518 for node in nodelist with known previous sent versions, or associated
521 for node in nodelist with known previous sent versions, or associated
519 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
522 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
520 be ``None``.
523 be ``None``.
521
524
522 Examines commit messages like "Differential Revision:" to get the
525 Examines commit messages like "Differential Revision:" to get the
523 association information.
526 association information.
524
527
525 If such commit message line is not found, examines all precursors and their
528 If such commit message line is not found, examines all precursors and their
526 tags. Tags with format like "D1234" are considered a match and the node
529 tags. Tags with format like "D1234" are considered a match and the node
527 with that tag, and the number after "D" (ex. 1234) will be returned.
530 with that tag, and the number after "D" (ex. 1234) will be returned.
528
531
529 The ``old node``, if not None, is guaranteed to be the last diff of
532 The ``old node``, if not None, is guaranteed to be the last diff of
530 corresponding Differential Revision, and exist in the repo.
533 corresponding Differential Revision, and exist in the repo.
531 """
534 """
532 unfi = repo.unfiltered()
535 unfi = repo.unfiltered()
533 has_node = unfi.changelog.index.has_node
536 has_node = unfi.changelog.index.has_node
534
537
535 result = {} # {node: (oldnode?, lastdiff?, drev)}
538 result = {} # {node: (oldnode?, lastdiff?, drev)}
536 # ordered for test stability when printing new -> old mapping below
539 # ordered for test stability when printing new -> old mapping below
537 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
540 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
538 for node in nodelist:
541 for node in nodelist:
539 ctx = unfi[node]
542 ctx = unfi[node]
540 # For tags like "D123", put them into "toconfirm" to verify later
543 # For tags like "D123", put them into "toconfirm" to verify later
541 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
544 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
542 for n in precnodes:
545 for n in precnodes:
543 if has_node(n):
546 if has_node(n):
544 for tag in unfi.nodetags(n):
547 for tag in unfi.nodetags(n):
545 m = _differentialrevisiontagre.match(tag)
548 m = _differentialrevisiontagre.match(tag)
546 if m:
549 if m:
547 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
550 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
548 break
551 break
549 else:
552 else:
550 continue # move to next predecessor
553 continue # move to next predecessor
551 break # found a tag, stop
554 break # found a tag, stop
552 else:
555 else:
553 # Check commit message
556 # Check commit message
554 m = _differentialrevisiondescre.search(ctx.description())
557 m = _differentialrevisiondescre.search(ctx.description())
555 if m:
558 if m:
556 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
559 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
557
560
558 # Double check if tags are genuine by collecting all old nodes from
561 # Double check if tags are genuine by collecting all old nodes from
559 # Phabricator, and expect precursors overlap with it.
562 # Phabricator, and expect precursors overlap with it.
560 if toconfirm:
563 if toconfirm:
561 drevs = [drev for force, precs, drev in toconfirm.values()]
564 drevs = [drev for force, precs, drev in toconfirm.values()]
562 alldiffs = callconduit(
565 alldiffs = callconduit(
563 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
566 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
564 )
567 )
565
568
566 def getnodes(d, precset):
569 def getnodes(d, precset):
567 # Ignore other nodes that were combined into the Differential
570 # Ignore other nodes that were combined into the Differential
568 # that aren't predecessors of the current local node.
571 # that aren't predecessors of the current local node.
569 return [n for n in getlocalcommits(d) if n in precset]
572 return [n for n in getlocalcommits(d) if n in precset]
570
573
571 for newnode, (force, precset, drev) in toconfirm.items():
574 for newnode, (force, precset, drev) in toconfirm.items():
572 diffs = [
575 diffs = [
573 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
576 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
574 ]
577 ]
575
578
576 # local predecessors known by Phabricator
579 # local predecessors known by Phabricator
577 phprecset = {n for d in diffs for n in getnodes(d, precset)}
580 phprecset = {n for d in diffs for n in getnodes(d, precset)}
578
581
579 # Ignore if precursors (Phabricator and local repo) do not overlap,
582 # Ignore if precursors (Phabricator and local repo) do not overlap,
580 # and force is not set (when commit message says nothing)
583 # and force is not set (when commit message says nothing)
581 if not force and not phprecset:
584 if not force and not phprecset:
582 tagname = b'D%d' % drev
585 tagname = b'D%d' % drev
583 tags.tag(
586 tags.tag(
584 repo,
587 repo,
585 tagname,
588 tagname,
586 repo.nullid,
589 repo.nullid,
587 message=None,
590 message=None,
588 user=None,
591 user=None,
589 date=None,
592 date=None,
590 local=True,
593 local=True,
591 )
594 )
592 unfi.ui.warn(
595 unfi.ui.warn(
593 _(
596 _(
594 b'D%d: local tag removed - does not match '
597 b'D%d: local tag removed - does not match '
595 b'Differential history\n'
598 b'Differential history\n'
596 )
599 )
597 % drev
600 % drev
598 )
601 )
599 continue
602 continue
600
603
601 # Find the last node using Phabricator metadata, and make sure it
604 # Find the last node using Phabricator metadata, and make sure it
602 # exists in the repo
605 # exists in the repo
603 oldnode = lastdiff = None
606 oldnode = lastdiff = None
604 if diffs:
607 if diffs:
605 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
608 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
606 oldnodes = getnodes(lastdiff, precset)
609 oldnodes = getnodes(lastdiff, precset)
607
610
608 _debug(
611 _debug(
609 unfi.ui,
612 unfi.ui,
610 b"%s mapped to old nodes %s\n"
613 b"%s mapped to old nodes %s\n"
611 % (
614 % (
612 short(newnode),
615 short(newnode),
613 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
616 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
614 ),
617 ),
615 )
618 )
616
619
617 # If this commit was the result of `hg fold` after submission,
620 # If this commit was the result of `hg fold` after submission,
618 # and now resubmitted with --fold, the easiest thing to do is
621 # and now resubmitted with --fold, the easiest thing to do is
619 # to leave the node clear. This only results in creating a new
622 # to leave the node clear. This only results in creating a new
620 # diff for the _same_ Differential Revision if this commit is
623 # diff for the _same_ Differential Revision if this commit is
621 # the first or last in the selected range. If we picked a node
624 # the first or last in the selected range. If we picked a node
622 # from the list instead, it would have to be the lowest if at
625 # from the list instead, it would have to be the lowest if at
623 # the beginning of the --fold range, or the highest at the end.
626 # the beginning of the --fold range, or the highest at the end.
624 # Otherwise, one or more of the nodes wouldn't be considered in
627 # Otherwise, one or more of the nodes wouldn't be considered in
625 # the diff, and the Differential wouldn't be properly updated.
628 # the diff, and the Differential wouldn't be properly updated.
626 # If this commit is the result of `hg split` in the same
629 # If this commit is the result of `hg split` in the same
627 # scenario, there is a single oldnode here (and multiple
630 # scenario, there is a single oldnode here (and multiple
628 # newnodes mapped to it). That makes it the same as the normal
631 # newnodes mapped to it). That makes it the same as the normal
629 # case, as the edges of the newnode range cleanly maps to one
632 # case, as the edges of the newnode range cleanly maps to one
630 # oldnode each.
633 # oldnode each.
631 if len(oldnodes) == 1:
634 if len(oldnodes) == 1:
632 oldnode = oldnodes[0]
635 oldnode = oldnodes[0]
633 if oldnode and not has_node(oldnode):
636 if oldnode and not has_node(oldnode):
634 oldnode = None
637 oldnode = None
635
638
636 result[newnode] = (oldnode, lastdiff, drev)
639 result[newnode] = (oldnode, lastdiff, drev)
637
640
638 return result
641 return result
639
642
640
643
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    drevids = {}
    for rev in revs:
        drevids[rev] = None
        ctx = repo[rev]
        # The commit message is the primary source of the association.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drevids[rev] = int(descmatch.group('id'))
            continue
        # Fall back to local tags of the form "D123".
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                drevids[rev] = int(tagmatch.group(1))
                break

    return drevids
662
665
663
666
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
672
675
673
676
class DiffChangeType:
    """Change kinds understood by Phabricator's differential.creatediff.

    The numeric values are part of the Conduit wire protocol and must not
    change.
    """

    ADD = 1  # brand new file
    CHANGE = 2  # content modified in place
    DELETE = 3  # file removed
    MOVE_AWAY = 4  # source side of a rename
    COPY_AWAY = 5  # source side of a copy
    MOVE_HERE = 6  # destination side of a rename
    COPY_HERE = 7  # destination side of a copy
    MULTICOPY = 8  # source copied to several destinations
683
686
684
687
class DiffFileType:
    """File kinds understood by Phabricator's differential.creatediff.

    The numeric values are part of the Conduit wire protocol and must not
    change.
    """

    TEXT = 1  # rendered as a text diff
    IMAGE = 2  # rendered inline as an image
    BINARY = 3  # opaque binary blob
689
692
690
693
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change"""

    # Hunk position/extent in the old and new versions of the file.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk text itself (diff body lines).
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
703
706
704
707
@attr.s
class phabchange:
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths this file was moved/copied to (MOVE_AWAY/COPY_AWAY).
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate each ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. one of ``gitmode``'s values)."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. one of ``gitmode``'s values)."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
744
747
745
748
@attr.s
class phabdiff:
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change dict}; populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its currentPath."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
772
775
773
776
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A huge context radius effectively yields full-file hunks, which is
    # what Phabricator expects.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the "@@ ... @@" line; Phabricator wants only the hunk body.
        corpus = b''.join(lines[1:])
        # Reassemble a full unified diff to get add/del line statistics.
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        hunk = phabhunk(
            oldOffset,
            oldLength,
            newOffset,
            newLength,
            corpus,
            addLines,
            delLines,
        )
        pchange.addhunk(hunk)
802
805
803
806
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this byte range.
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
829
832
830
833
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # The allocate call returned a PHID: the file is large enough
            # that the server wants it in chunks.
            uploadchunks(fctx, fphid)
        else:
            # Small file: a single upload call returns the PHID.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
866
869
867
870
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged; mirror the new side's metadata onto the
        # old side.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
891
894
892
895
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            # Let the web UI render it inline rather than as a download.
            pchange.fileType = DiffFileType.IMAGE
905
908
906
909
# Copied from mercurial/patch.py
# Maps a Mercurial file flag (b'l' symlink, b'x' executable, b'' regular)
# to the corresponding git file-mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
909
912
910
913
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn the user so the silent type switch isn't surprising.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
924
927
925
928
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary and non-UTF-8 files get no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
938
941
939
942
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # NOTE: notutf8() prints a message as a side effect, so preserve
        # the short-circuit evaluation order of these checks.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
964
967
965
968
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — a rename's source is deleted from
    it so addremoved() won't also report it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # Folding several commits: collect copies across the whole range.
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where this file came from (itself, unless copied/renamed).
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A move destination already exists; a second destination
                # upgrades the source to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a message as a side effect; evaluation order of
        # these checks is deliberate.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Source-side records are only known complete after the main loop
    # (MULTICOPY upgrades, additional awayPaths), so flush them last.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1047
1050
1048
1051
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff

    # The conduit call yielded nothing; abort with a range-aware message.
    if basectx != ctx:
        msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
    else:
        msg = _(b'cannot create diff for %s') % ctx
    raise error.Abort(msg)
1078
1081
1079
1082
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the tip commit of the posted range; "parent" is the
    # parent of the *base* so the whole range can be reapplied losslessly.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" records per-commit metadata for every commit in the
    # range, keyed by hex node.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1123
1126
1124
1127
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.
    """
    basectx, ctx = ctxs[0], ctxs[-1]
    repo = ctx.repo()

    # Only upload a new diff when the patch content actually changed since
    # the previous posting (or when there is no previous posting at all).
    neednewdiff = True
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            unfi[oldbasenode], unfi[oldnode], diffopts
        )

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        info = callconduit(
            repo.ui,
            b'differential.parsecommitmessage',
            {b'corpus': _ctx.description()},
        )

        for k in (b'title', b'summary', b'testPlan'):
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue

            if k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    transactions.extend(
        {b'type': k, b'value': v} for k, v in fields.items()
    )

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1243
1246
1244
1247
def userphids(ui, names):
    """convert user names to PHIDs

    Aborts if any of ``names`` is unknown to the server.
    """
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1260
1263
1261
1264
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    # Map the raw action keyword to its translated, colorized description.
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1279
1282
1280
1283
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    _debug(
        unfi.ui,
        b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
    )

    ctxs = [unfi[newnode] for newnode in newnodes]
    try:
        writediffproperties(ctxs, diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1298
1301
1299
1302
1300 @vcrcommand(
1303 @vcrcommand(
1301 b'phabsend',
1304 b'phabsend',
1302 [
1305 [
1303 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1306 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1304 (b'', b'amend', True, _(b'update commit messages')),
1307 (b'', b'amend', True, _(b'update commit messages')),
1305 (b'', b'reviewer', [], _(b'specify reviewers')),
1308 (b'', b'reviewer', [], _(b'specify reviewers')),
1306 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1309 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1307 (
1310 (
1308 b'm',
1311 b'm',
1309 b'comment',
1312 b'comment',
1310 b'',
1313 b'',
1311 _(b'add a comment to Revisions with new/updated Diffs'),
1314 _(b'add a comment to Revisions with new/updated Diffs'),
1312 ),
1315 ),
1313 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1316 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1314 (b'', b'fold', False, _(b'combine the revisions into one review')),
1317 (b'', b'fold', False, _(b'combine the revisions into one review')),
1315 ],
1318 ],
1316 _(b'REV [OPTIONS]'),
1319 _(b'REV [OPTIONS]'),
1317 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1320 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1318 )
1321 )
1319 def phabsend(ui, repo, *revs, **opts):
1322 def phabsend(ui, repo, *revs, **opts):
1320 """upload changesets to Phabricator
1323 """upload changesets to Phabricator
1321
1324
1322 If there are multiple revisions specified, they will be send as a stack
1325 If there are multiple revisions specified, they will be send as a stack
1323 with a linear dependencies relationship using the order specified by the
1326 with a linear dependencies relationship using the order specified by the
1324 revset.
1327 revset.
1325
1328
1326 For the first time uploading changesets, local tags will be created to
1329 For the first time uploading changesets, local tags will be created to
1327 maintain the association. After the first time, phabsend will check
1330 maintain the association. After the first time, phabsend will check
1328 obsstore and tags information so it can figure out whether to update an
1331 obsstore and tags information so it can figure out whether to update an
1329 existing Differential Revision, or create a new one.
1332 existing Differential Revision, or create a new one.
1330
1333
1331 If --amend is set, update commit messages so they have the
1334 If --amend is set, update commit messages so they have the
1332 ``Differential Revision`` URL, remove related tags. This is similar to what
1335 ``Differential Revision`` URL, remove related tags. This is similar to what
1333 arcanist will do, and is more desired in author-push workflows. Otherwise,
1336 arcanist will do, and is more desired in author-push workflows. Otherwise,
1334 use local tags to record the ``Differential Revision`` association.
1337 use local tags to record the ``Differential Revision`` association.
1335
1338
1336 The --confirm option lets you confirm changesets before sending them. You
1339 The --confirm option lets you confirm changesets before sending them. You
1337 can also add following to your configuration file to make it default
1340 can also add following to your configuration file to make it default
1338 behaviour::
1341 behaviour::
1339
1342
1340 [phabsend]
1343 [phabsend]
1341 confirm = true
1344 confirm = true
1342
1345
1343 By default, a separate review will be created for each commit that is
1346 By default, a separate review will be created for each commit that is
1344 selected, and will have the same parent/child relationship in Phabricator.
1347 selected, and will have the same parent/child relationship in Phabricator.
1345 If ``--fold`` is set, multiple commits are rolled up into a single review
1348 If ``--fold`` is set, multiple commits are rolled up into a single review
1346 as if diffed from the parent of the first revision to the last. The commit
1349 as if diffed from the parent of the first revision to the last. The commit
1347 messages are concatenated in the summary field on Phabricator.
1350 messages are concatenated in the summary field on Phabricator.
1348
1351
1349 phabsend will check obsstore and the above association to decide whether to
1352 phabsend will check obsstore and the above association to decide whether to
1350 update an existing Differential Revision, or create a new one.
1353 update an existing Differential Revision, or create a new one.
1351 """
1354 """
1352 opts = pycompat.byteskwargs(opts)
1355 opts = pycompat.byteskwargs(opts)
1353 revs = list(revs) + opts.get(b'rev', [])
1356 revs = list(revs) + opts.get(b'rev', [])
1354 revs = logcmdutil.revrange(repo, revs)
1357 revs = logcmdutil.revrange(repo, revs)
1355 revs.sort() # ascending order to preserve topological parent/child in phab
1358 revs.sort() # ascending order to preserve topological parent/child in phab
1356
1359
1357 if not revs:
1360 if not revs:
1358 raise error.Abort(_(b'phabsend requires at least one changeset'))
1361 raise error.Abort(_(b'phabsend requires at least one changeset'))
1359 if opts.get(b'amend'):
1362 if opts.get(b'amend'):
1360 cmdutil.checkunfinished(repo)
1363 cmdutil.checkunfinished(repo)
1361
1364
1362 ctxs = [repo[rev] for rev in revs]
1365 ctxs = [repo[rev] for rev in revs]
1363
1366
1364 if any(c for c in ctxs if c.obsolete()):
1367 if any(c for c in ctxs if c.obsolete()):
1365 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1368 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1366
1369
1367 # Ensure the local commits are an unbroken range. The semantics of the
1370 # Ensure the local commits are an unbroken range. The semantics of the
1368 # --fold option implies this, and the auto restacking of orphans requires
1371 # --fold option implies this, and the auto restacking of orphans requires
1369 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1372 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1370 # get A' as a parent.
1373 # get A' as a parent.
1371 def _fail_nonlinear_revs(revs, revtype):
1374 def _fail_nonlinear_revs(revs, revtype):
1372 badnodes = [repo[r].node() for r in revs]
1375 badnodes = [repo[r].node() for r in revs]
1373 raise error.Abort(
1376 raise error.Abort(
1374 _(b"cannot phabsend multiple %s revisions: %s")
1377 _(b"cannot phabsend multiple %s revisions: %s")
1375 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1378 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1376 hint=_(b"the revisions must form a linear chain"),
1379 hint=_(b"the revisions must form a linear chain"),
1377 )
1380 )
1378
1381
1379 heads = repo.revs(b'heads(%ld)', revs)
1382 heads = repo.revs(b'heads(%ld)', revs)
1380 if len(heads) > 1:
1383 if len(heads) > 1:
1381 _fail_nonlinear_revs(heads, b"head")
1384 _fail_nonlinear_revs(heads, b"head")
1382
1385
1383 roots = repo.revs(b'roots(%ld)', revs)
1386 roots = repo.revs(b'roots(%ld)', revs)
1384 if len(roots) > 1:
1387 if len(roots) > 1:
1385 _fail_nonlinear_revs(roots, b"root")
1388 _fail_nonlinear_revs(roots, b"root")
1386
1389
1387 fold = opts.get(b'fold')
1390 fold = opts.get(b'fold')
1388 if fold:
1391 if fold:
1389 if len(revs) == 1:
1392 if len(revs) == 1:
1390 # TODO: just switch to --no-fold instead?
1393 # TODO: just switch to --no-fold instead?
1391 raise error.Abort(_(b"cannot fold a single revision"))
1394 raise error.Abort(_(b"cannot fold a single revision"))
1392
1395
1393 # There's no clear way to manage multiple commits with a Dxxx tag, so
1396 # There's no clear way to manage multiple commits with a Dxxx tag, so
1394 # require the amend option. (We could append "_nnn", but then it
1397 # require the amend option. (We could append "_nnn", but then it
1395 # becomes jumbled if earlier commits are added to an update.) It should
1398 # becomes jumbled if earlier commits are added to an update.) It should
1396 # lock the repo and ensure that the range is editable, but that would
1399 # lock the repo and ensure that the range is editable, but that would
1397 # make the code pretty convoluted. The default behavior of `arc` is to
1400 # make the code pretty convoluted. The default behavior of `arc` is to
1398 # create a new review anyway.
1401 # create a new review anyway.
1399 if not opts.get(b"amend"):
1402 if not opts.get(b"amend"):
1400 raise error.Abort(_(b"cannot fold with --no-amend"))
1403 raise error.Abort(_(b"cannot fold with --no-amend"))
1401
1404
1402 # It might be possible to bucketize the revisions by the DREV value, and
1405 # It might be possible to bucketize the revisions by the DREV value, and
1403 # iterate over those groups when posting, and then again when amending.
1406 # iterate over those groups when posting, and then again when amending.
1404 # But for simplicity, require all selected revisions to be for the same
1407 # But for simplicity, require all selected revisions to be for the same
1405 # DREV (if present). Adding local revisions to an existing DREV is
1408 # DREV (if present). Adding local revisions to an existing DREV is
1406 # acceptable.
1409 # acceptable.
1407 drevmatchers = [
1410 drevmatchers = [
1408 _differentialrevisiondescre.search(ctx.description())
1411 _differentialrevisiondescre.search(ctx.description())
1409 for ctx in ctxs
1412 for ctx in ctxs
1410 ]
1413 ]
1411 if len({m.group('url') for m in drevmatchers if m}) > 1:
1414 if len({m.group('url') for m in drevmatchers if m}) > 1:
1412 raise error.Abort(
1415 raise error.Abort(
1413 _(b"cannot fold revisions with different DREV values")
1416 _(b"cannot fold revisions with different DREV values")
1414 )
1417 )
1415
1418
1416 # {newnode: (oldnode, olddiff, olddrev}
1419 # {newnode: (oldnode, olddiff, olddrev}
1417 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1420 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1418
1421
1419 confirm = ui.configbool(b'phabsend', b'confirm')
1422 confirm = ui.configbool(b'phabsend', b'confirm')
1420 confirm |= bool(opts.get(b'confirm'))
1423 confirm |= bool(opts.get(b'confirm'))
1421 if confirm:
1424 if confirm:
1422 confirmed = _confirmbeforesend(repo, revs, oldmap)
1425 confirmed = _confirmbeforesend(repo, revs, oldmap)
1423 if not confirmed:
1426 if not confirmed:
1424 raise error.Abort(_(b'phabsend cancelled'))
1427 raise error.Abort(_(b'phabsend cancelled'))
1425
1428
1426 actions = []
1429 actions = []
1427 reviewers = opts.get(b'reviewer', [])
1430 reviewers = opts.get(b'reviewer', [])
1428 blockers = opts.get(b'blocker', [])
1431 blockers = opts.get(b'blocker', [])
1429 phids = []
1432 phids = []
1430 if reviewers:
1433 if reviewers:
1431 phids.extend(userphids(repo.ui, reviewers))
1434 phids.extend(userphids(repo.ui, reviewers))
1432 if blockers:
1435 if blockers:
1433 phids.extend(
1436 phids.extend(
1434 map(
1437 map(
1435 lambda phid: b'blocking(%s)' % phid,
1438 lambda phid: b'blocking(%s)' % phid,
1436 userphids(repo.ui, blockers),
1439 userphids(repo.ui, blockers),
1437 )
1440 )
1438 )
1441 )
1439 if phids:
1442 if phids:
1440 actions.append({b'type': b'reviewers.add', b'value': phids})
1443 actions.append({b'type': b'reviewers.add', b'value': phids})
1441
1444
1442 drevids = [] # [int]
1445 drevids = [] # [int]
1443 diffmap = {} # {newnode: diff}
1446 diffmap = {} # {newnode: diff}
1444
1447
1445 # Send patches one by one so we know their Differential Revision PHIDs and
1448 # Send patches one by one so we know their Differential Revision PHIDs and
1446 # can provide dependency relationship
1449 # can provide dependency relationship
1447 lastrevphid = None
1450 lastrevphid = None
1448 for ctx in ctxs:
1451 for ctx in ctxs:
1449 if fold:
1452 if fold:
1450 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1453 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1451 else:
1454 else:
1452 ui.debug(b'sending rev %d\n' % ctx.rev())
1455 ui.debug(b'sending rev %d\n' % ctx.rev())
1453
1456
1454 # Get Differential Revision ID
1457 # Get Differential Revision ID
1455 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1458 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1456 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1459 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1457
1460
1458 if fold:
1461 if fold:
1459 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1462 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1460 ctxs[-1].node(), (None, None, None)
1463 ctxs[-1].node(), (None, None, None)
1461 )
1464 )
1462
1465
1463 if oldnode != ctx.node() or opts.get(b'amend'):
1466 if oldnode != ctx.node() or opts.get(b'amend'):
1464 # Create or update Differential Revision
1467 # Create or update Differential Revision
1465 revision, diff = createdifferentialrevision(
1468 revision, diff = createdifferentialrevision(
1466 ctxs if fold else [ctx],
1469 ctxs if fold else [ctx],
1467 revid,
1470 revid,
1468 lastrevphid,
1471 lastrevphid,
1469 oldbasenode,
1472 oldbasenode,
1470 oldnode,
1473 oldnode,
1471 olddiff,
1474 olddiff,
1472 actions,
1475 actions,
1473 opts.get(b'comment'),
1476 opts.get(b'comment'),
1474 )
1477 )
1475
1478
1476 if fold:
1479 if fold:
1477 for ctx in ctxs:
1480 for ctx in ctxs:
1478 diffmap[ctx.node()] = diff
1481 diffmap[ctx.node()] = diff
1479 else:
1482 else:
1480 diffmap[ctx.node()] = diff
1483 diffmap[ctx.node()] = diff
1481
1484
1482 newrevid = int(revision[b'object'][b'id'])
1485 newrevid = int(revision[b'object'][b'id'])
1483 newrevphid = revision[b'object'][b'phid']
1486 newrevphid = revision[b'object'][b'phid']
1484 if revid:
1487 if revid:
1485 action = b'updated'
1488 action = b'updated'
1486 else:
1489 else:
1487 action = b'created'
1490 action = b'created'
1488
1491
1489 # Create a local tag to note the association, if commit message
1492 # Create a local tag to note the association, if commit message
1490 # does not have it already
1493 # does not have it already
1491 if not fold:
1494 if not fold:
1492 m = _differentialrevisiondescre.search(ctx.description())
1495 m = _differentialrevisiondescre.search(ctx.description())
1493 if not m or int(m.group('id')) != newrevid:
1496 if not m or int(m.group('id')) != newrevid:
1494 tagname = b'D%d' % newrevid
1497 tagname = b'D%d' % newrevid
1495 tags.tag(
1498 tags.tag(
1496 repo,
1499 repo,
1497 tagname,
1500 tagname,
1498 ctx.node(),
1501 ctx.node(),
1499 message=None,
1502 message=None,
1500 user=None,
1503 user=None,
1501 date=None,
1504 date=None,
1502 local=True,
1505 local=True,
1503 )
1506 )
1504 else:
1507 else:
1505 # Nothing changed. But still set "newrevphid" so the next revision
1508 # Nothing changed. But still set "newrevphid" so the next revision
1506 # could depend on this one and "newrevid" for the summary line.
1509 # could depend on this one and "newrevid" for the summary line.
1507 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1510 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1508 newrevid = revid
1511 newrevid = revid
1509 action = b'skipped'
1512 action = b'skipped'
1510
1513
1511 drevids.append(newrevid)
1514 drevids.append(newrevid)
1512 lastrevphid = newrevphid
1515 lastrevphid = newrevphid
1513
1516
1514 if fold:
1517 if fold:
1515 for c in ctxs:
1518 for c in ctxs:
1516 if oldmap.get(c.node(), (None, None, None))[2]:
1519 if oldmap.get(c.node(), (None, None, None))[2]:
1517 action = b'updated'
1520 action = b'updated'
1518 else:
1521 else:
1519 action = b'created'
1522 action = b'created'
1520 _print_phabsend_action(ui, c, newrevid, action)
1523 _print_phabsend_action(ui, c, newrevid, action)
1521 break
1524 break
1522
1525
1523 _print_phabsend_action(ui, ctx, newrevid, action)
1526 _print_phabsend_action(ui, ctx, newrevid, action)
1524
1527
1525 # Update commit messages and remove tags
1528 # Update commit messages and remove tags
1526 if opts.get(b'amend'):
1529 if opts.get(b'amend'):
1527 unfi = repo.unfiltered()
1530 unfi = repo.unfiltered()
1528 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1531 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1529 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1532 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1530 # Eagerly evaluate commits to restabilize before creating new
1533 # Eagerly evaluate commits to restabilize before creating new
1531 # commits. The selected revisions are excluded because they are
1534 # commits. The selected revisions are excluded because they are
1532 # automatically restacked as part of the submission process.
1535 # automatically restacked as part of the submission process.
1533 restack = [
1536 restack = [
1534 c
1537 c
1535 for c in repo.set(
1538 for c in repo.set(
1536 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1539 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1537 revs,
1540 revs,
1538 revs,
1541 revs,
1539 )
1542 )
1540 ]
1543 ]
1541 wnode = unfi[b'.'].node()
1544 wnode = unfi[b'.'].node()
1542 mapping = {} # {oldnode: [newnode]}
1545 mapping = {} # {oldnode: [newnode]}
1543 newnodes = []
1546 newnodes = []
1544
1547
1545 drevid = drevids[0]
1548 drevid = drevids[0]
1546
1549
1547 for i, rev in enumerate(revs):
1550 for i, rev in enumerate(revs):
1548 old = unfi[rev]
1551 old = unfi[rev]
1549 if not fold:
1552 if not fold:
1550 drevid = drevids[i]
1553 drevid = drevids[i]
1551 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1554 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1552
1555
1553 newdesc = get_amended_desc(drev, old, fold)
1556 newdesc = get_amended_desc(drev, old, fold)
1554 # Make sure commit message contain "Differential Revision"
1557 # Make sure commit message contain "Differential Revision"
1555 if (
1558 if (
1556 old.description() != newdesc
1559 old.description() != newdesc
1557 or old.p1().node() in mapping
1560 or old.p1().node() in mapping
1558 or old.p2().node() in mapping
1561 or old.p2().node() in mapping
1559 ):
1562 ):
1560 if old.phase() == phases.public:
1563 if old.phase() == phases.public:
1561 ui.warn(
1564 ui.warn(
1562 _(b"warning: not updating public commit %s\n")
1565 _(b"warning: not updating public commit %s\n")
1563 % scmutil.formatchangeid(old)
1566 % scmutil.formatchangeid(old)
1564 )
1567 )
1565 continue
1568 continue
1566 parents = [
1569 parents = [
1567 mapping.get(old.p1().node(), (old.p1(),))[0],
1570 mapping.get(old.p1().node(), (old.p1(),))[0],
1568 mapping.get(old.p2().node(), (old.p2(),))[0],
1571 mapping.get(old.p2().node(), (old.p2(),))[0],
1569 ]
1572 ]
1570 newdesc = rewriteutil.update_hash_refs(
1573 newdesc = rewriteutil.update_hash_refs(
1571 repo,
1574 repo,
1572 newdesc,
1575 newdesc,
1573 mapping,
1576 mapping,
1574 )
1577 )
1575 new = context.metadataonlyctx(
1578 new = context.metadataonlyctx(
1576 repo,
1579 repo,
1577 old,
1580 old,
1578 parents=parents,
1581 parents=parents,
1579 text=newdesc,
1582 text=newdesc,
1580 user=old.user(),
1583 user=old.user(),
1581 date=old.date(),
1584 date=old.date(),
1582 extra=old.extra(),
1585 extra=old.extra(),
1583 )
1586 )
1584
1587
1585 newnode = new.commit()
1588 newnode = new.commit()
1586
1589
1587 mapping[old.node()] = [newnode]
1590 mapping[old.node()] = [newnode]
1588
1591
1589 if fold:
1592 if fold:
1590 # Defer updating the (single) Diff until all nodes are
1593 # Defer updating the (single) Diff until all nodes are
1591 # collected. No tags were created, so none need to be
1594 # collected. No tags were created, so none need to be
1592 # removed.
1595 # removed.
1593 newnodes.append(newnode)
1596 newnodes.append(newnode)
1594 continue
1597 continue
1595
1598
1596 _amend_diff_properties(
1599 _amend_diff_properties(
1597 unfi, drevid, [newnode], diffmap[old.node()]
1600 unfi, drevid, [newnode], diffmap[old.node()]
1598 )
1601 )
1599
1602
1600 # Remove local tags since it's no longer necessary
1603 # Remove local tags since it's no longer necessary
1601 tagname = b'D%d' % drevid
1604 tagname = b'D%d' % drevid
1602 if tagname in repo.tags():
1605 if tagname in repo.tags():
1603 tags.tag(
1606 tags.tag(
1604 repo,
1607 repo,
1605 tagname,
1608 tagname,
1606 repo.nullid,
1609 repo.nullid,
1607 message=None,
1610 message=None,
1608 user=None,
1611 user=None,
1609 date=None,
1612 date=None,
1610 local=True,
1613 local=True,
1611 )
1614 )
1612 elif fold:
1615 elif fold:
1613 # When folding multiple commits into one review with
1616 # When folding multiple commits into one review with
1614 # --fold, track even the commits that weren't amended, so
1617 # --fold, track even the commits that weren't amended, so
1615 # that their association isn't lost if the properties are
1618 # that their association isn't lost if the properties are
1616 # rewritten below.
1619 # rewritten below.
1617 newnodes.append(old.node())
1620 newnodes.append(old.node())
1618
1621
1619 # If the submitted commits are public, no amend takes place so
1622 # If the submitted commits are public, no amend takes place so
1620 # there are no newnodes and therefore no diff update to do.
1623 # there are no newnodes and therefore no diff update to do.
1621 if fold and newnodes:
1624 if fold and newnodes:
1622 diff = diffmap[old.node()]
1625 diff = diffmap[old.node()]
1623
1626
1624 # The diff object in diffmap doesn't have the local commits
1627 # The diff object in diffmap doesn't have the local commits
1625 # because that could be returned from differential.creatediff,
1628 # because that could be returned from differential.creatediff,
1626 # not differential.querydiffs. So use the queried diff (if
1629 # not differential.querydiffs. So use the queried diff (if
1627 # present), or force the amend (a new revision is being posted.)
1630 # present), or force the amend (a new revision is being posted.)
1628 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1631 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1629 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1632 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1630 _amend_diff_properties(unfi, drevid, newnodes, diff)
1633 _amend_diff_properties(unfi, drevid, newnodes, diff)
1631 else:
1634 else:
1632 _debug(
1635 _debug(
1633 ui,
1636 ui,
1634 b"local commit list for D%d is already up-to-date\n"
1637 b"local commit list for D%d is already up-to-date\n"
1635 % drevid,
1638 % drevid,
1636 )
1639 )
1637 elif fold:
1640 elif fold:
1638 _debug(ui, b"no newnodes to update\n")
1641 _debug(ui, b"no newnodes to update\n")
1639
1642
1640 # Restack any children of first-time submissions that were orphaned
1643 # Restack any children of first-time submissions that were orphaned
1641 # in the process. The ctx won't report that it is an orphan until
1644 # in the process. The ctx won't report that it is an orphan until
1642 # the cleanup takes place below.
1645 # the cleanup takes place below.
1643 for old in restack:
1646 for old in restack:
1644 parents = [
1647 parents = [
1645 mapping.get(old.p1().node(), (old.p1(),))[0],
1648 mapping.get(old.p1().node(), (old.p1(),))[0],
1646 mapping.get(old.p2().node(), (old.p2(),))[0],
1649 mapping.get(old.p2().node(), (old.p2(),))[0],
1647 ]
1650 ]
1648 new = context.metadataonlyctx(
1651 new = context.metadataonlyctx(
1649 repo,
1652 repo,
1650 old,
1653 old,
1651 parents=parents,
1654 parents=parents,
1652 text=rewriteutil.update_hash_refs(
1655 text=rewriteutil.update_hash_refs(
1653 repo, old.description(), mapping
1656 repo, old.description(), mapping
1654 ),
1657 ),
1655 user=old.user(),
1658 user=old.user(),
1656 date=old.date(),
1659 date=old.date(),
1657 extra=old.extra(),
1660 extra=old.extra(),
1658 )
1661 )
1659
1662
1660 newnode = new.commit()
1663 newnode = new.commit()
1661
1664
1662 # Don't obsolete unselected descendants of nodes that have not
1665 # Don't obsolete unselected descendants of nodes that have not
1663 # been changed in this transaction- that results in an error.
1666 # been changed in this transaction- that results in an error.
1664 if newnode != old.node():
1667 if newnode != old.node():
1665 mapping[old.node()] = [newnode]
1668 mapping[old.node()] = [newnode]
1666 _debug(
1669 _debug(
1667 ui,
1670 ui,
1668 b"restabilizing %s as %s\n"
1671 b"restabilizing %s as %s\n"
1669 % (short(old.node()), short(newnode)),
1672 % (short(old.node()), short(newnode)),
1670 )
1673 )
1671 else:
1674 else:
1672 _debug(
1675 _debug(
1673 ui,
1676 ui,
1674 b"not restabilizing unchanged %s\n" % short(old.node()),
1677 b"not restabilizing unchanged %s\n" % short(old.node()),
1675 )
1678 )
1676
1679
1677 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1680 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1678 if wnode in mapping:
1681 if wnode in mapping:
1679 unfi.setparents(mapping[wnode][0])
1682 unfi.setparents(mapping[wnode][0])
1680
1683
1681
1684
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Note the trailing space in b'Parent ' — kept as-is to match the
        # exact header text "hg export" emits.
        (b'parent', b'Parent '),
    ]
)
1693
1696
1694
1697
def _confirmbeforesend(repo, revs, oldmap):
    """interactively confirm before sending revisions to Phabricator

    Prints one summary line per candidate revision, labelled with the
    existing Differential Revision id taken from ``oldmap`` (a mapping of
    ``{node: (oldnode, olddiff, drevid)}``), or ``NEW`` when no review is
    associated yet, then prompts whether to send them to the configured
    Phabricator URL.

    Returns True to proceed with the send, False if the user declined.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # Only the drev id matters here; the node/diff entries of the
        # oldmap tuple are used elsewhere during submission.
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s\n')
            % (
                drevdesc,
                cmdutil.format_changeset_summary(ui, ctx, b'phabsend'),
            )
        )

    # promptchoice() returns the index of the chosen answer; 0 is "&Yes",
    # so any truthy result means the user picked "&No".
    if ui.promptchoice(
        _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1720
1723
1721
1724
# Normalized Differential Revision status names (see _getstatusname()) that
# the DREVSPEC query language accepts as bare status symbols in querydrev().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1730
1733
1731
1734
1732 def _getstatusname(drev):
1735 def _getstatusname(drev):
1733 """get normalized status name from a Differential Revision"""
1736 """get normalized status name from a Differential Revision"""
1734 return drev[b'statusName'].replace(b' ', b'').lower()
1737 return drev[b'statusName'].replace(b' ', b'').lower()
1735
1738
1736
1739
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Precedence/dispatch table consumed by parser.parser() in _parse() below.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1751
1754
1752
1755
def _tokenize(text):
    """yield (token-type, token-value, position) tuples for a DREVSPEC

    Token types are the keys of ``_elements``: the operator characters,
    b'symbol' for a maximal run of non-special bytes, and a final b'end'
    marker.  Spaces produce no token but still advance the position.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume the longest run of non-special bytes as one
        # symbol; iterbytestr yields length-1 bytes so the membership test
        # against ``special`` works on both Python views of bytes.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1772
1775
1773
1776
def _parse(text):
    """parse a DREVSPEC string into a parse tree

    Raises ParseError if the parser stopped before consuming all of
    ``text`` (i.e. trailing garbage after a valid expression).
    """
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1779
1782
1780
1783
1781 def _parsedrev(symbol):
1784 def _parsedrev(symbol):
1782 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1785 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1783 if symbol.startswith(b'D') and symbol[1:].isdigit():
1786 if symbol.startswith(b'D') and symbol[1:].isdigit():
1784 return int(symbol[1:])
1787 return int(symbol[1:])
1785 if symbol.isdigit():
1788 if symbol.isdigit():
1786 return int(symbol)
1789 return int(symbol)
1787
1790
1788
1791
1789 def _prefetchdrevs(tree):
1792 def _prefetchdrevs(tree):
1790 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1793 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1791 drevs = set()
1794 drevs = set()
1792 ancestordrevs = set()
1795 ancestordrevs = set()
1793 op = tree[0]
1796 op = tree[0]
1794 if op == b'symbol':
1797 if op == b'symbol':
1795 r = _parsedrev(tree[1])
1798 r = _parsedrev(tree[1])
1796 if r:
1799 if r:
1797 drevs.add(r)
1800 drevs.add(r)
1798 elif op == b'ancestors':
1801 elif op == b'ancestors':
1799 r, a = _prefetchdrevs(tree[1])
1802 r, a = _prefetchdrevs(tree[1])
1800 drevs.update(r)
1803 drevs.update(r)
1801 ancestordrevs.update(r)
1804 ancestordrevs.update(r)
1802 ancestordrevs.update(a)
1805 ancestordrevs.update(a)
1803 else:
1806 else:
1804 for t in tree[1:]:
1807 for t in tree[1:]:
1805 r, a = _prefetchdrevs(t)
1808 r, a = _prefetchdrevs(t)
1806 drevs.update(r)
1809 drevs.update(r)
1807 ancestordrevs.update(a)
1810 ancestordrevs.update(a)
1808 return drevs, ancestordrevs
1811 return drevs, ancestordrevs
1809
1812
1810
1813
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # The cache key is the first requested numeric id or PHID,
        # whichever the caller supplied.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result; each drev is cached under both
        # its PHID and its numeric id, so lookups by either form hit.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            # queue.pop() takes the most recently queued entry, so this is
            # a depth-first walk of the "depends-on" edges.
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # Collected top-down; reverse so the bottom of the stack is first.
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache, shared by fetch()/getstack()/walk() above
    # and below.
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Guess that a stack's ancestors live in the ``batchsize`` ids just
        # below each ancestor root, and fetch that window in one call.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            # A symbol is either a drev id ("D123"/"123") or one of the
            # known status names.
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Dispatch to operator.and_/add/sub on the child smartsets.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1936
1939
1937
1940
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Drop empty sections so they don't leave blank paragraphs behind.
    return b'\n\n'.join(s for s in sections if s)
1951
1954
1952
1955
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made
    for each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    desc = ctx.description()
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)
    return b'\n\n'.join([desc, uri])
1978
1981
1979
1982
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    local = properties.get(b'local:commits') or {}
    if len(local) > 1:
        # Keys of "local:commits" are hex nodes; convert to binary.
        return {bin(hexnode) for hexnode in local}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    node = getdiffmeta(diff).get(b'node', b'')
    return {bin(node) or None}
1993
1996
1994
1997
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        localcommits = props.get(b'local:commits')
        if localcommits:
            # "local:commits" values are per-commit dicts; pick the first
            # one (sorted() matches the original selection behavior).
            commit = sorted(localcommits.values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Local time only; the zone offset is not stored by arc.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if len(parents) >= 1:
                meta[b'parent'] = parents[0]
        else:
            meta = {}

    # Fill anything still missing from the top-level diff fields. Note that
    # when "hg:meta" existed, ``meta`` is that stored dict, so these
    # assignments update it in place.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2062
2065
2063
2066
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _wrap(one):
            # A leading ':' pulls in the whole dependency stack when
            # --stack was requested; parenthesize so '+'-joining is safe.
            inner = b':(%s)' % one if stack else one
            return b'(%s)' % inner

        combined = b'+'.join(pycompat.maplist(_wrap, specs))
        result = querydrev(ui, combined)
        if result:
            return result

    raise error.Abort(_(b"empty DREVSPEC set"))
2083
2086
2084
2087
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs.  Only the most recent diff of
    # each revision is imported, hence the max() over the diff ids.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # latest diff of this revision (same selection rule as the prefetch)
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
2121
2124
2122
2125
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _write(patches):
        # Stream each patch straight to the ui; the drev id is not needed
        # for plain-text output (contrast with phabimport's callback).
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
2159
2162
2160
2163
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Start from the working directory parent; each imported changeset
        # becomes the parent of the next one so the stack stays linear.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, io.BytesIO(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2220
2223
2221
2224
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
        (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
    ],
    _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Mutually exclusive status-transition flags, one per conduit
    # transaction type.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    # CLI options use '-', the opts dict uses '_' (fancyopts normalization).
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    revs = opts.get(b'rev')
    if revs:
        if not repo:
            raise error.InputError(_(b'--rev requires a repository'))

        if specs:
            raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))

        # Translate local revisions into their D# specs; abort if any
        # selected revision has no associated Differential.
        drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs]))
        specs = []
        unknown = []
        for r, d in drevmap.items():
            if d is None:
                unknown.append(repo[r])
            else:
                specs.append(b'D%d' % d)
        if unknown:
            raise error.InputError(
                _(b'selected revisions without a Differential: %s')
                % scmutil.nodesummaries(repo, unknown)
            )

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # --comment is only attached to the last revision of the set.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
2300
2303
2301
2304
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the Differential URL embedded in the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )
    # Fall back to a local "D123"-style tag left by phabsend.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        # NOTE(review): assumes phabricator.url is configured when such a
        # tag exists — confirm; an unset value would break endswith().
        base = ctx.repo().ui.config(b'phabricator', b'url')
        if not base.endswith(b'/'):
            base += b'/'
        return templateutil.hybriddict(
            {
                b'url': base + tag,
                b'id': tag,
            }
        )
    return None
2332
2335
2333
2336
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # No Differential associated with this revision.
        return None
    results = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for result in results:
        if int(result[b'id']) != drevid:
            continue
        return templateutil.hybriddict(
            {
                b'url': result[b'uri'],
                b'status': result[b'statusName'],
            }
        )
    return None
2356
2359
2357
2360
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition revisions into those with a known Differential (queried in
    # one batch below) and those without (dropped from the graph).
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in drevmap.items():
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # Single conduit round-trip for all Differentials, then fan the
    # results back out to the local revisions.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Per-changeset hook: print the Differential URL and its
        # color-labelled status under the graph entry.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now