##// END OF EJS Templates
phabricator: adapt to the new `urlutil.url()` API...
Matt Harbison -
r47830:067f2c53 5.8 stable
parent child Browse files
Show More
@@ -1,2401 +1,2401 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
 19 changesets from being sent. The requirement could be disabled by changing
 19 changesets from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 # retry failed command N time (default 0). Useful when using the extension
38 # retry failed command N time (default 0). Useful when using the extension
 39 # over flaky connection.
 39 # over flaky connection.
40 #
40 #
41 # We wait `retry.interval` between each retry, in seconds.
41 # We wait `retry.interval` between each retry, in seconds.
42 # (default 1 second).
42 # (default 1 second).
43 retry = 3
43 retry = 3
44 retry.interval = 10
44 retry.interval = 10
45
45
46 # the retry option can combine well with the http.timeout one.
46 # the retry option can combine well with the http.timeout one.
47 #
47 #
48 # For example to give up on http request after 20 seconds:
48 # For example to give up on http request after 20 seconds:
49 [http]
49 [http]
50 timeout=20
50 timeout=20
51
51
52 [auth]
52 [auth]
53 example.schemes = https
53 example.schemes = https
54 example.prefix = phab.example.com
54 example.prefix = phab.example.com
55
55
56 # API token. Get it from https://$HOST/conduit/login/
56 # API token. Get it from https://$HOST/conduit/login/
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
58 """
58 """
59
59
60 from __future__ import absolute_import
60 from __future__ import absolute_import
61
61
62 import base64
62 import base64
63 import contextlib
63 import contextlib
64 import hashlib
64 import hashlib
65 import itertools
65 import itertools
66 import json
66 import json
67 import mimetypes
67 import mimetypes
68 import operator
68 import operator
69 import re
69 import re
70 import time
70 import time
71
71
72 from mercurial.node import bin, nullid, short
72 from mercurial.node import bin, nullid, short
73 from mercurial.i18n import _
73 from mercurial.i18n import _
74 from mercurial.pycompat import getattr
74 from mercurial.pycompat import getattr
75 from mercurial.thirdparty import attr
75 from mercurial.thirdparty import attr
76 from mercurial import (
76 from mercurial import (
77 cmdutil,
77 cmdutil,
78 context,
78 context,
79 copies,
79 copies,
80 encoding,
80 encoding,
81 error,
81 error,
82 exthelper,
82 exthelper,
83 graphmod,
83 graphmod,
84 httpconnection as httpconnectionmod,
84 httpconnection as httpconnectionmod,
85 localrepo,
85 localrepo,
86 logcmdutil,
86 logcmdutil,
87 match,
87 match,
88 mdiff,
88 mdiff,
89 obsutil,
89 obsutil,
90 parser,
90 parser,
91 patch,
91 patch,
92 phases,
92 phases,
93 pycompat,
93 pycompat,
94 rewriteutil,
94 rewriteutil,
95 scmutil,
95 scmutil,
96 smartset,
96 smartset,
97 tags,
97 tags,
98 templatefilters,
98 templatefilters,
99 templateutil,
99 templateutil,
100 url as urlmod,
100 url as urlmod,
101 util,
101 util,
102 )
102 )
103 from mercurial.utils import (
103 from mercurial.utils import (
104 procutil,
104 procutil,
105 stringutil,
105 stringutil,
106 urlutil,
106 urlutil,
107 )
107 )
108 from . import show
108 from . import show
109
109
110
110
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points under the module-level names
# that Mercurial's extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup
124
124
# Config knobs; see the module docstring for user-facing documentation.
# developer config: phabricator.batchsize
eh.configitem(b'phabricator', b'batchsize', default=12)
eh.configitem(b'phabricator', b'callsign', default=None)
eh.configitem(b'phabricator', b'curlcmd', default=None)
# developer config: phabricator.debug
eh.configitem(b'phabricator', b'debug', default=False)
# developer config: phabricator.repophid
eh.configitem(b'phabricator', b'repophid', default=None)
eh.configitem(b'phabricator', b'retry', default=0)
eh.configitem(b'phabricator', b'retry.interval', default=1)
eh.configitem(b'phabricator', b'url', default=None)
eh.configitem(b'phabsend', b'confirm', default=False)
eh.configitem(b'phabimport', b'secret', default=False)
eh.configitem(b'phabimport', b'obsolete', default=False)
183
183
# Colours used when rendering phabsend actions and Differential statuses.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
196
196
# Extra command-line flag appended to every command wrapped by ``vcrcommand``.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
209
209
210
210
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    loaded = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # A missing/unreadable .arcconfig simply means no extra config.
        pass

    cfg = util.sortdict()
    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    # Still chain to the wrapped loader so .hg/hgrc is read as usual.
    return (
        orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or loaded
    )
248
248
249
249
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Decorator factory: like ``command``, but with VCR record/replay.

    Adds the ``--test-vcr`` flag to the command.  When the flag is given,
    HTTP traffic is recorded to (or replayed from) the named cassette file
    via the ``vcr`` package; otherwise the command runs unchanged.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Requests match when URI, method and decoded body parameters agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Strip real API tokens out of recorded cassettes.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session specific; drop them from recordings.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if not vcr:
                return fn(*args, **kwargs)
            cassette = pycompat.fsdecode(vcr)
            import hgdemandimport

            # vcr does dynamic imports that fight with demandimport.
            with hgdemandimport.deactivated():
                import vcr as vcrmod
                import vcr.stubs as stubs

                vcr = vcrmod.VCR(
                    serializer='json',
                    before_record_request=sanitiserequest,
                    before_record_response=sanitiseresponse,
                    custom_patches=[
                        (
                            urlmod,
                            'httpconnection',
                            stubs.VCRHTTPConnection,
                        ),
                        (
                            urlmod,
                            'httpsconnection',
                            stubs.VCRHTTPSConnection,
                        ),
                    ],
                )
                vcr.register_matcher('hgmatcher', hgmatcher)
                with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                    return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
329
329
330
330
331 def _debug(ui, *msg, **opts):
331 def _debug(ui, *msg, **opts):
332 """write debug output for Phabricator if ``phabricator.debug`` is set
332 """write debug output for Phabricator if ``phabricator.debug`` is set
333
333
334 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
334 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
335 printed with the --debug argument.
335 printed with the --debug argument.
336 """
336 """
337 if ui.configbool(b"phabricator", b"debug"):
337 if ui.configbool(b"phabricator", b"debug"):
338 flag = ui.debugflag
338 flag = ui.debugflag
339 try:
339 try:
340 ui.debugflag = True
340 ui.debugflag = True
341 ui.write(*msg, **opts)
341 ui.write(*msg, **opts)
342 finally:
342 finally:
343 ui.debugflag = flag
343 ui.debugflag = flag
344
344
345
345
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # Python -> PHP form
            value = b'true' if value else b'false'
        # Exact type checks on purpose: subclasses of list/dict are treated
        # as leaf values, matching the original dispatch-by-type behavior.
        if type(value) is list:
            children = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            children = list(value.items())
        else:
            flatparams[prefix] = value
            return
        for key, child in children:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), child)
            else:
                flatten(key, child)

    flatten(b'', params)
    return urlutil.urlreq.urlencode(flatparams)
371
371
372
372
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Find the [auth] group whose prefix matches the conduit URL.
    res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
401
401
402
402
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    # urlutil.url() replaces the old util.url() for URL parsing.
    url, authinfo = urlutil.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))

    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)

    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Delegate the HTTP work to an external curl invocation.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library, with optional retries on transient errors.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        max_try = ui.configint(b'phabricator', b'retry') + 1
        timeout = ui.configwith(float, b'http', b'timeout')
        for try_count in range(max_try):
            try:
                with contextlib.closing(
                    urlopener.open(request, timeout=timeout)
                ) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                if try_count == max_try - 1:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (try_count + 1, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)

    ui.debug(b'Conduit Response: %s\n' % body)
    # json.loads only accepts bytes from py3.6+, and only returns unicode
    # strings; map those back to local (bytes) strings.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
463
463
464
464
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))

    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
488
488
489
489
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the looked-up PHID in the config for the rest of this process.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
509
509
510
510
511 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
511 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
512 _differentialrevisiondescre = re.compile(
512 _differentialrevisiondescre = re.compile(
513 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
513 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
514 )
514 )
515
515
516
516
517 def getoldnodedrevmap(repo, nodelist):
517 def getoldnodedrevmap(repo, nodelist):
518 """find previous nodes that has been sent to Phabricator
518 """find previous nodes that has been sent to Phabricator
519
519
520 return {node: (oldnode, Differential diff, Differential Revision ID)}
520 return {node: (oldnode, Differential diff, Differential Revision ID)}
521 for node in nodelist with known previous sent versions, or associated
521 for node in nodelist with known previous sent versions, or associated
522 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
522 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
523 be ``None``.
523 be ``None``.
524
524
525 Examines commit messages like "Differential Revision:" to get the
525 Examines commit messages like "Differential Revision:" to get the
526 association information.
526 association information.
527
527
528 If such commit message line is not found, examines all precursors and their
528 If such commit message line is not found, examines all precursors and their
529 tags. Tags with format like "D1234" are considered a match and the node
529 tags. Tags with format like "D1234" are considered a match and the node
530 with that tag, and the number after "D" (ex. 1234) will be returned.
530 with that tag, and the number after "D" (ex. 1234) will be returned.
531
531
532 The ``old node``, if not None, is guaranteed to be the last diff of
532 The ``old node``, if not None, is guaranteed to be the last diff of
533 corresponding Differential Revision, and exist in the repo.
533 corresponding Differential Revision, and exist in the repo.
534 """
534 """
535 unfi = repo.unfiltered()
535 unfi = repo.unfiltered()
536 has_node = unfi.changelog.index.has_node
536 has_node = unfi.changelog.index.has_node
537
537
538 result = {} # {node: (oldnode?, lastdiff?, drev)}
538 result = {} # {node: (oldnode?, lastdiff?, drev)}
539 # ordered for test stability when printing new -> old mapping below
539 # ordered for test stability when printing new -> old mapping below
540 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
540 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
541 for node in nodelist:
541 for node in nodelist:
542 ctx = unfi[node]
542 ctx = unfi[node]
543 # For tags like "D123", put them into "toconfirm" to verify later
543 # For tags like "D123", put them into "toconfirm" to verify later
544 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
544 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
545 for n in precnodes:
545 for n in precnodes:
546 if has_node(n):
546 if has_node(n):
547 for tag in unfi.nodetags(n):
547 for tag in unfi.nodetags(n):
548 m = _differentialrevisiontagre.match(tag)
548 m = _differentialrevisiontagre.match(tag)
549 if m:
549 if m:
550 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
550 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
551 break
551 break
552 else:
552 else:
553 continue # move to next predecessor
553 continue # move to next predecessor
554 break # found a tag, stop
554 break # found a tag, stop
555 else:
555 else:
556 # Check commit message
556 # Check commit message
557 m = _differentialrevisiondescre.search(ctx.description())
557 m = _differentialrevisiondescre.search(ctx.description())
558 if m:
558 if m:
559 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
559 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
560
560
561 # Double check if tags are genuine by collecting all old nodes from
561 # Double check if tags are genuine by collecting all old nodes from
562 # Phabricator, and expect precursors overlap with it.
562 # Phabricator, and expect precursors overlap with it.
563 if toconfirm:
563 if toconfirm:
564 drevs = [drev for force, precs, drev in toconfirm.values()]
564 drevs = [drev for force, precs, drev in toconfirm.values()]
565 alldiffs = callconduit(
565 alldiffs = callconduit(
566 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
566 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
567 )
567 )
568
568
569 def getnodes(d, precset):
569 def getnodes(d, precset):
570 # Ignore other nodes that were combined into the Differential
570 # Ignore other nodes that were combined into the Differential
571 # that aren't predecessors of the current local node.
571 # that aren't predecessors of the current local node.
572 return [n for n in getlocalcommits(d) if n in precset]
572 return [n for n in getlocalcommits(d) if n in precset]
573
573
574 for newnode, (force, precset, drev) in toconfirm.items():
574 for newnode, (force, precset, drev) in toconfirm.items():
575 diffs = [
575 diffs = [
576 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
576 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
577 ]
577 ]
578
578
579 # local predecessors known by Phabricator
579 # local predecessors known by Phabricator
580 phprecset = {n for d in diffs for n in getnodes(d, precset)}
580 phprecset = {n for d in diffs for n in getnodes(d, precset)}
581
581
582 # Ignore if precursors (Phabricator and local repo) do not overlap,
582 # Ignore if precursors (Phabricator and local repo) do not overlap,
583 # and force is not set (when commit message says nothing)
583 # and force is not set (when commit message says nothing)
584 if not force and not phprecset:
584 if not force and not phprecset:
585 tagname = b'D%d' % drev
585 tagname = b'D%d' % drev
586 tags.tag(
586 tags.tag(
587 repo,
587 repo,
588 tagname,
588 tagname,
589 nullid,
589 nullid,
590 message=None,
590 message=None,
591 user=None,
591 user=None,
592 date=None,
592 date=None,
593 local=True,
593 local=True,
594 )
594 )
595 unfi.ui.warn(
595 unfi.ui.warn(
596 _(
596 _(
597 b'D%d: local tag removed - does not match '
597 b'D%d: local tag removed - does not match '
598 b'Differential history\n'
598 b'Differential history\n'
599 )
599 )
600 % drev
600 % drev
601 )
601 )
602 continue
602 continue
603
603
604 # Find the last node using Phabricator metadata, and make sure it
604 # Find the last node using Phabricator metadata, and make sure it
605 # exists in the repo
605 # exists in the repo
606 oldnode = lastdiff = None
606 oldnode = lastdiff = None
607 if diffs:
607 if diffs:
608 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
608 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
609 oldnodes = getnodes(lastdiff, precset)
609 oldnodes = getnodes(lastdiff, precset)
610
610
611 _debug(
611 _debug(
612 unfi.ui,
612 unfi.ui,
613 b"%s mapped to old nodes %s\n"
613 b"%s mapped to old nodes %s\n"
614 % (
614 % (
615 short(newnode),
615 short(newnode),
616 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
616 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
617 ),
617 ),
618 )
618 )
619
619
620 # If this commit was the result of `hg fold` after submission,
620 # If this commit was the result of `hg fold` after submission,
621 # and now resubmitted with --fold, the easiest thing to do is
621 # and now resubmitted with --fold, the easiest thing to do is
622 # to leave the node clear. This only results in creating a new
622 # to leave the node clear. This only results in creating a new
623 # diff for the _same_ Differential Revision if this commit is
623 # diff for the _same_ Differential Revision if this commit is
624 # the first or last in the selected range. If we picked a node
624 # the first or last in the selected range. If we picked a node
625 # from the list instead, it would have to be the lowest if at
625 # from the list instead, it would have to be the lowest if at
626 # the beginning of the --fold range, or the highest at the end.
626 # the beginning of the --fold range, or the highest at the end.
627 # Otherwise, one or more of the nodes wouldn't be considered in
627 # Otherwise, one or more of the nodes wouldn't be considered in
628 # the diff, and the Differential wouldn't be properly updated.
628 # the diff, and the Differential wouldn't be properly updated.
629 # If this commit is the result of `hg split` in the same
629 # If this commit is the result of `hg split` in the same
630 # scenario, there is a single oldnode here (and multiple
630 # scenario, there is a single oldnode here (and multiple
631 # newnodes mapped to it). That makes it the same as the normal
631 # newnodes mapped to it). That makes it the same as the normal
632 # case, as the edges of the newnode range cleanly maps to one
632 # case, as the edges of the newnode range cleanly maps to one
633 # oldnode each.
633 # oldnode each.
634 if len(oldnodes) == 1:
634 if len(oldnodes) == 1:
635 oldnode = oldnodes[0]
635 oldnode = oldnodes[0]
636 if oldnode and not has_node(oldnode):
636 if oldnode and not has_node(oldnode):
637 oldnode = None
637 oldnode = None
638
638
639 result[newnode] = (oldnode, lastdiff, drev)
639 result[newnode] = (oldnode, lastdiff, drev)
640
640
641 return result
641 return result
642
642
643
643
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The commit description is consulted first; if it carries no Differential
    Revision reference, any local ``D123``-style tag on the node is used as a
    fallback.
    """
    mapping = {}
    for rev in revs:
        ctx = repo[rev]
        drev = None
        # Prefer the "Differential Revision:" line in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drev = int(descmatch.group('id'))
        else:
            # Fall back to local tags of the form "D<number>".
            for tag in repo.nodetags(ctx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drev = int(tagmatch.group(1))
                    break
        mapping[rev] = drev

    return mapping
665
665
666
666
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # patch.diffui yields (chunk, label) pairs; the labels are only for
    # terminal colorization and are discarded here.
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    return b''.join(chunk for chunk, _label in chunks)
675
675
676
676
class DiffChangeType(object):
    """Integer codes sent to Phabricator describing how a file changed in a
    diff (added, deleted, moved/copied away or here, etc).

    The values must match what the Differential API expects; do not renumber.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
686
686
687
687
class DiffFileType(object):
    """Integer codes sent to Phabricator describing a changed file's content
    type (plain text, image, or other binary).
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
692
692
693
693
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    The camelCase attribute names are required: they become the JSON keys of
    the conduit request and must match Phabricator's schema.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk's text; maketext() fills this from the diff lines.
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
706
706
707
707
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    The camelCase attribute names are required by the conduit wire format.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths when this file was moved or copied away (see addadded)
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """copy each metadata entry to a key with ``new:`` replaced by ``old:``"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode (e.g. b'100644')"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode (e.g. b'100755')"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk and fold its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
747
747
748
748
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    The camelCase attribute names are required by the conduit wire format.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change-dict}, populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange for one file, keyed by its current path"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
775
775
776
776
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``basectx.p1()`` and ``ctx`` and attaches each
    resulting hunk, together with its added/deleted line counts, to
    ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A huge context value effectively requests the whole file in one hunk.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is dropped (presumably the "@@" hunk header); the offsets
        # and lengths are transmitted separately via the phabhunk fields.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Rebuild a standalone patch so diffstat can count add/delete lines.
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
805
805
806
806
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID obtained from an earlier ``file.allocate``
    call.
    """
    ui = fctx.repo().ui
    # Ask the server which byte ranges it still needs.
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this range (e.g. resumed upload).
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
832
832
833
833
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; aborts if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID from allocate: single-shot upload returns one directly.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # Allocation produced a PHID: stream the content in chunks.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
869
869
870
870
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # Content is unchanged (e.g. a mode-only change), so reuse the
        # new-side metadata for the old side.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
894
894
895
895
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    # Upload the content first; the returned PHID ties the diff to the file.
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        guessed = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = guessed
        # Images get a dedicated file type in the Phabricator UI.
        if guessed.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
908
908
909
909
# Copied from mercurial/patch.py
# Map a context flag string ('l' symlink, 'x' executable, '' regular file)
# to the corresponding git-style mode bytes used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
912
912
913
913
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user why this file will show up as binary on the server.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
927
927
928
928
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or undecodable) deletions are sent without text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
941
941
942
942
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # Only record modes when they actually changed (e.g. +x flipped).
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # If either side is binary or not valid UTF-8, the whole change must
        # be uploaded as binary content rather than text hunks.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
967
967
968
968
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    May mutate ``removed``: when a copy source is found in ``removed`` the
    pair is recorded as a move and the source is taken off the removed list,
    so addremoved() won't see it afterwards.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # Folding multiple commits: recompute copies across the whole range.
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappeared: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Another copy of an already-moved source: upgrade the source
                # change to MULTICOPY and record this extra destination.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Binary content (either side) is uploaded; text gets inline hunks.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # The *_AWAY source changes are emitted only after all destinations were
    # seen, since later copies may still add awayPaths (MULTICOPY upgrades).
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1050
1050
1051
1051
def creatediff(basectx, ctx):
    """create a Differential Diff

    ``basectx`` is the first commit covered by the diff and ``ctx`` the last;
    they are the same changeset unless several commits are folded into one
    diff. Returns the conduit response, or aborts when the call produced
    nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
1081
1081
1082
1082
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.

    Two ``differential.setdiffproperty`` calls are made: one storing the
    ``hg:meta`` property (tip commit metadata plus the base parent), and one
    storing ``local:commits`` (per-commit author/branch/parent details).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    # Map of hex node -> metadata for every commit covered by the diff.
    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1126
1126
1127
1127
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of decoded Conduit responses; aborts
    when the ``differential.revision.edit`` call returns an empty result.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context git diffs of the old and new ranges to decide
        # whether a new diff upload is actually needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1246
1246
1247
1247
def userphids(ui, names):
    """convert user names to PHIDs

    ``names`` is an iterable of Phabricator usernames (matched
    case-insensitively).  Aborts if any name cannot be resolved, since the
    ``user.search`` API silently omits unknown usernames rather than
    erroring.  Returns the list of PHIDs for the resolved users.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1263
1263
1264
1264
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.

    ``action`` must be one of ``b'created'``, ``b'skipped'`` or
    ``b'updated'``; other values raise ``KeyError``.
    """
    # Color/label the action with the matching phabricator.action.* label.
    actiondesc = ui.label(
        {
            b'created': _(b'created'),
            b'skipped': _(b'skipped'),
            b'updated': _(b'updated'),
        }[action],
        b'phabricator.action.%s' % action,
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1282
1282
1283
1283
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.

    ``unfi`` is an unfiltered repo, ``newnodes`` the amended nodes to record
    on the diff.  URL errors from the Conduit call are downgraded to a
    warning so the amend phase can proceed.
    """
    _debug(
        unfi.ui,
        b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
    )

    try:
        writediffproperties([unfi[newnode] for newnode in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1301
1301
1302
1302
1303 @vcrcommand(
1303 @vcrcommand(
1304 b'phabsend',
1304 b'phabsend',
1305 [
1305 [
1306 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1306 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1307 (b'', b'amend', True, _(b'update commit messages')),
1307 (b'', b'amend', True, _(b'update commit messages')),
1308 (b'', b'reviewer', [], _(b'specify reviewers')),
1308 (b'', b'reviewer', [], _(b'specify reviewers')),
1309 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1309 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1310 (
1310 (
1311 b'm',
1311 b'm',
1312 b'comment',
1312 b'comment',
1313 b'',
1313 b'',
1314 _(b'add a comment to Revisions with new/updated Diffs'),
1314 _(b'add a comment to Revisions with new/updated Diffs'),
1315 ),
1315 ),
1316 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1316 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1317 (b'', b'fold', False, _(b'combine the revisions into one review')),
1317 (b'', b'fold', False, _(b'combine the revisions into one review')),
1318 ],
1318 ],
1319 _(b'REV [OPTIONS]'),
1319 _(b'REV [OPTIONS]'),
1320 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1320 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1321 )
1321 )
1322 def phabsend(ui, repo, *revs, **opts):
1322 def phabsend(ui, repo, *revs, **opts):
1323 """upload changesets to Phabricator
1323 """upload changesets to Phabricator
1324
1324
1325 If there are multiple revisions specified, they will be send as a stack
1325 If there are multiple revisions specified, they will be send as a stack
1326 with a linear dependencies relationship using the order specified by the
1326 with a linear dependencies relationship using the order specified by the
1327 revset.
1327 revset.
1328
1328
1329 For the first time uploading changesets, local tags will be created to
1329 For the first time uploading changesets, local tags will be created to
1330 maintain the association. After the first time, phabsend will check
1330 maintain the association. After the first time, phabsend will check
1331 obsstore and tags information so it can figure out whether to update an
1331 obsstore and tags information so it can figure out whether to update an
1332 existing Differential Revision, or create a new one.
1332 existing Differential Revision, or create a new one.
1333
1333
1334 If --amend is set, update commit messages so they have the
1334 If --amend is set, update commit messages so they have the
1335 ``Differential Revision`` URL, remove related tags. This is similar to what
1335 ``Differential Revision`` URL, remove related tags. This is similar to what
1336 arcanist will do, and is more desired in author-push workflows. Otherwise,
1336 arcanist will do, and is more desired in author-push workflows. Otherwise,
1337 use local tags to record the ``Differential Revision`` association.
1337 use local tags to record the ``Differential Revision`` association.
1338
1338
1339 The --confirm option lets you confirm changesets before sending them. You
1339 The --confirm option lets you confirm changesets before sending them. You
1340 can also add following to your configuration file to make it default
1340 can also add following to your configuration file to make it default
1341 behaviour::
1341 behaviour::
1342
1342
1343 [phabsend]
1343 [phabsend]
1344 confirm = true
1344 confirm = true
1345
1345
1346 By default, a separate review will be created for each commit that is
1346 By default, a separate review will be created for each commit that is
1347 selected, and will have the same parent/child relationship in Phabricator.
1347 selected, and will have the same parent/child relationship in Phabricator.
1348 If ``--fold`` is set, multiple commits are rolled up into a single review
1348 If ``--fold`` is set, multiple commits are rolled up into a single review
1349 as if diffed from the parent of the first revision to the last. The commit
1349 as if diffed from the parent of the first revision to the last. The commit
1350 messages are concatenated in the summary field on Phabricator.
1350 messages are concatenated in the summary field on Phabricator.
1351
1351
1352 phabsend will check obsstore and the above association to decide whether to
1352 phabsend will check obsstore and the above association to decide whether to
1353 update an existing Differential Revision, or create a new one.
1353 update an existing Differential Revision, or create a new one.
1354 """
1354 """
1355 opts = pycompat.byteskwargs(opts)
1355 opts = pycompat.byteskwargs(opts)
1356 revs = list(revs) + opts.get(b'rev', [])
1356 revs = list(revs) + opts.get(b'rev', [])
1357 revs = scmutil.revrange(repo, revs)
1357 revs = scmutil.revrange(repo, revs)
1358 revs.sort() # ascending order to preserve topological parent/child in phab
1358 revs.sort() # ascending order to preserve topological parent/child in phab
1359
1359
1360 if not revs:
1360 if not revs:
1361 raise error.Abort(_(b'phabsend requires at least one changeset'))
1361 raise error.Abort(_(b'phabsend requires at least one changeset'))
1362 if opts.get(b'amend'):
1362 if opts.get(b'amend'):
1363 cmdutil.checkunfinished(repo)
1363 cmdutil.checkunfinished(repo)
1364
1364
1365 ctxs = [repo[rev] for rev in revs]
1365 ctxs = [repo[rev] for rev in revs]
1366
1366
1367 if any(c for c in ctxs if c.obsolete()):
1367 if any(c for c in ctxs if c.obsolete()):
1368 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1368 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1369
1369
1370 # Ensure the local commits are an unbroken range. The semantics of the
1370 # Ensure the local commits are an unbroken range. The semantics of the
1371 # --fold option implies this, and the auto restacking of orphans requires
1371 # --fold option implies this, and the auto restacking of orphans requires
1372 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1372 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1373 # get A' as a parent.
1373 # get A' as a parent.
1374 def _fail_nonlinear_revs(revs, revtype):
1374 def _fail_nonlinear_revs(revs, revtype):
1375 badnodes = [repo[r].node() for r in revs]
1375 badnodes = [repo[r].node() for r in revs]
1376 raise error.Abort(
1376 raise error.Abort(
1377 _(b"cannot phabsend multiple %s revisions: %s")
1377 _(b"cannot phabsend multiple %s revisions: %s")
1378 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1378 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1379 hint=_(b"the revisions must form a linear chain"),
1379 hint=_(b"the revisions must form a linear chain"),
1380 )
1380 )
1381
1381
1382 heads = repo.revs(b'heads(%ld)', revs)
1382 heads = repo.revs(b'heads(%ld)', revs)
1383 if len(heads) > 1:
1383 if len(heads) > 1:
1384 _fail_nonlinear_revs(heads, b"head")
1384 _fail_nonlinear_revs(heads, b"head")
1385
1385
1386 roots = repo.revs(b'roots(%ld)', revs)
1386 roots = repo.revs(b'roots(%ld)', revs)
1387 if len(roots) > 1:
1387 if len(roots) > 1:
1388 _fail_nonlinear_revs(roots, b"root")
1388 _fail_nonlinear_revs(roots, b"root")
1389
1389
1390 fold = opts.get(b'fold')
1390 fold = opts.get(b'fold')
1391 if fold:
1391 if fold:
1392 if len(revs) == 1:
1392 if len(revs) == 1:
1393 # TODO: just switch to --no-fold instead?
1393 # TODO: just switch to --no-fold instead?
1394 raise error.Abort(_(b"cannot fold a single revision"))
1394 raise error.Abort(_(b"cannot fold a single revision"))
1395
1395
1396 # There's no clear way to manage multiple commits with a Dxxx tag, so
1396 # There's no clear way to manage multiple commits with a Dxxx tag, so
1397 # require the amend option. (We could append "_nnn", but then it
1397 # require the amend option. (We could append "_nnn", but then it
1398 # becomes jumbled if earlier commits are added to an update.) It should
1398 # becomes jumbled if earlier commits are added to an update.) It should
1399 # lock the repo and ensure that the range is editable, but that would
1399 # lock the repo and ensure that the range is editable, but that would
1400 # make the code pretty convoluted. The default behavior of `arc` is to
1400 # make the code pretty convoluted. The default behavior of `arc` is to
1401 # create a new review anyway.
1401 # create a new review anyway.
1402 if not opts.get(b"amend"):
1402 if not opts.get(b"amend"):
1403 raise error.Abort(_(b"cannot fold with --no-amend"))
1403 raise error.Abort(_(b"cannot fold with --no-amend"))
1404
1404
1405 # It might be possible to bucketize the revisions by the DREV value, and
1405 # It might be possible to bucketize the revisions by the DREV value, and
1406 # iterate over those groups when posting, and then again when amending.
1406 # iterate over those groups when posting, and then again when amending.
1407 # But for simplicity, require all selected revisions to be for the same
1407 # But for simplicity, require all selected revisions to be for the same
1408 # DREV (if present). Adding local revisions to an existing DREV is
1408 # DREV (if present). Adding local revisions to an existing DREV is
1409 # acceptable.
1409 # acceptable.
1410 drevmatchers = [
1410 drevmatchers = [
1411 _differentialrevisiondescre.search(ctx.description())
1411 _differentialrevisiondescre.search(ctx.description())
1412 for ctx in ctxs
1412 for ctx in ctxs
1413 ]
1413 ]
1414 if len({m.group('url') for m in drevmatchers if m}) > 1:
1414 if len({m.group('url') for m in drevmatchers if m}) > 1:
1415 raise error.Abort(
1415 raise error.Abort(
1416 _(b"cannot fold revisions with different DREV values")
1416 _(b"cannot fold revisions with different DREV values")
1417 )
1417 )
1418
1418
1419 # {newnode: (oldnode, olddiff, olddrev}
1419 # {newnode: (oldnode, olddiff, olddrev}
1420 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1420 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1421
1421
1422 confirm = ui.configbool(b'phabsend', b'confirm')
1422 confirm = ui.configbool(b'phabsend', b'confirm')
1423 confirm |= bool(opts.get(b'confirm'))
1423 confirm |= bool(opts.get(b'confirm'))
1424 if confirm:
1424 if confirm:
1425 confirmed = _confirmbeforesend(repo, revs, oldmap)
1425 confirmed = _confirmbeforesend(repo, revs, oldmap)
1426 if not confirmed:
1426 if not confirmed:
1427 raise error.Abort(_(b'phabsend cancelled'))
1427 raise error.Abort(_(b'phabsend cancelled'))
1428
1428
1429 actions = []
1429 actions = []
1430 reviewers = opts.get(b'reviewer', [])
1430 reviewers = opts.get(b'reviewer', [])
1431 blockers = opts.get(b'blocker', [])
1431 blockers = opts.get(b'blocker', [])
1432 phids = []
1432 phids = []
1433 if reviewers:
1433 if reviewers:
1434 phids.extend(userphids(repo.ui, reviewers))
1434 phids.extend(userphids(repo.ui, reviewers))
1435 if blockers:
1435 if blockers:
1436 phids.extend(
1436 phids.extend(
1437 map(
1437 map(
1438 lambda phid: b'blocking(%s)' % phid,
1438 lambda phid: b'blocking(%s)' % phid,
1439 userphids(repo.ui, blockers),
1439 userphids(repo.ui, blockers),
1440 )
1440 )
1441 )
1441 )
1442 if phids:
1442 if phids:
1443 actions.append({b'type': b'reviewers.add', b'value': phids})
1443 actions.append({b'type': b'reviewers.add', b'value': phids})
1444
1444
1445 drevids = [] # [int]
1445 drevids = [] # [int]
1446 diffmap = {} # {newnode: diff}
1446 diffmap = {} # {newnode: diff}
1447
1447
1448 # Send patches one by one so we know their Differential Revision PHIDs and
1448 # Send patches one by one so we know their Differential Revision PHIDs and
1449 # can provide dependency relationship
1449 # can provide dependency relationship
1450 lastrevphid = None
1450 lastrevphid = None
1451 for ctx in ctxs:
1451 for ctx in ctxs:
1452 if fold:
1452 if fold:
1453 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1453 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1454 else:
1454 else:
1455 ui.debug(b'sending rev %d\n' % ctx.rev())
1455 ui.debug(b'sending rev %d\n' % ctx.rev())
1456
1456
1457 # Get Differential Revision ID
1457 # Get Differential Revision ID
1458 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1458 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1459 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1459 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1460
1460
1461 if fold:
1461 if fold:
1462 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1462 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1463 ctxs[-1].node(), (None, None, None)
1463 ctxs[-1].node(), (None, None, None)
1464 )
1464 )
1465
1465
1466 if oldnode != ctx.node() or opts.get(b'amend'):
1466 if oldnode != ctx.node() or opts.get(b'amend'):
1467 # Create or update Differential Revision
1467 # Create or update Differential Revision
1468 revision, diff = createdifferentialrevision(
1468 revision, diff = createdifferentialrevision(
1469 ctxs if fold else [ctx],
1469 ctxs if fold else [ctx],
1470 revid,
1470 revid,
1471 lastrevphid,
1471 lastrevphid,
1472 oldbasenode,
1472 oldbasenode,
1473 oldnode,
1473 oldnode,
1474 olddiff,
1474 olddiff,
1475 actions,
1475 actions,
1476 opts.get(b'comment'),
1476 opts.get(b'comment'),
1477 )
1477 )
1478
1478
1479 if fold:
1479 if fold:
1480 for ctx in ctxs:
1480 for ctx in ctxs:
1481 diffmap[ctx.node()] = diff
1481 diffmap[ctx.node()] = diff
1482 else:
1482 else:
1483 diffmap[ctx.node()] = diff
1483 diffmap[ctx.node()] = diff
1484
1484
1485 newrevid = int(revision[b'object'][b'id'])
1485 newrevid = int(revision[b'object'][b'id'])
1486 newrevphid = revision[b'object'][b'phid']
1486 newrevphid = revision[b'object'][b'phid']
1487 if revid:
1487 if revid:
1488 action = b'updated'
1488 action = b'updated'
1489 else:
1489 else:
1490 action = b'created'
1490 action = b'created'
1491
1491
1492 # Create a local tag to note the association, if commit message
1492 # Create a local tag to note the association, if commit message
1493 # does not have it already
1493 # does not have it already
1494 if not fold:
1494 if not fold:
1495 m = _differentialrevisiondescre.search(ctx.description())
1495 m = _differentialrevisiondescre.search(ctx.description())
1496 if not m or int(m.group('id')) != newrevid:
1496 if not m or int(m.group('id')) != newrevid:
1497 tagname = b'D%d' % newrevid
1497 tagname = b'D%d' % newrevid
1498 tags.tag(
1498 tags.tag(
1499 repo,
1499 repo,
1500 tagname,
1500 tagname,
1501 ctx.node(),
1501 ctx.node(),
1502 message=None,
1502 message=None,
1503 user=None,
1503 user=None,
1504 date=None,
1504 date=None,
1505 local=True,
1505 local=True,
1506 )
1506 )
1507 else:
1507 else:
1508 # Nothing changed. But still set "newrevphid" so the next revision
1508 # Nothing changed. But still set "newrevphid" so the next revision
1509 # could depend on this one and "newrevid" for the summary line.
1509 # could depend on this one and "newrevid" for the summary line.
1510 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1510 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1511 newrevid = revid
1511 newrevid = revid
1512 action = b'skipped'
1512 action = b'skipped'
1513
1513
1514 drevids.append(newrevid)
1514 drevids.append(newrevid)
1515 lastrevphid = newrevphid
1515 lastrevphid = newrevphid
1516
1516
1517 if fold:
1517 if fold:
1518 for c in ctxs:
1518 for c in ctxs:
1519 if oldmap.get(c.node(), (None, None, None))[2]:
1519 if oldmap.get(c.node(), (None, None, None))[2]:
1520 action = b'updated'
1520 action = b'updated'
1521 else:
1521 else:
1522 action = b'created'
1522 action = b'created'
1523 _print_phabsend_action(ui, c, newrevid, action)
1523 _print_phabsend_action(ui, c, newrevid, action)
1524 break
1524 break
1525
1525
1526 _print_phabsend_action(ui, ctx, newrevid, action)
1526 _print_phabsend_action(ui, ctx, newrevid, action)
1527
1527
1528 # Update commit messages and remove tags
1528 # Update commit messages and remove tags
1529 if opts.get(b'amend'):
1529 if opts.get(b'amend'):
1530 unfi = repo.unfiltered()
1530 unfi = repo.unfiltered()
1531 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1531 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1532 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1532 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1533 # Eagerly evaluate commits to restabilize before creating new
1533 # Eagerly evaluate commits to restabilize before creating new
1534 # commits. The selected revisions are excluded because they are
1534 # commits. The selected revisions are excluded because they are
1535 # automatically restacked as part of the submission process.
1535 # automatically restacked as part of the submission process.
1536 restack = [
1536 restack = [
1537 c
1537 c
1538 for c in repo.set(
1538 for c in repo.set(
1539 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1539 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1540 revs,
1540 revs,
1541 revs,
1541 revs,
1542 )
1542 )
1543 ]
1543 ]
1544 wnode = unfi[b'.'].node()
1544 wnode = unfi[b'.'].node()
1545 mapping = {} # {oldnode: [newnode]}
1545 mapping = {} # {oldnode: [newnode]}
1546 newnodes = []
1546 newnodes = []
1547
1547
1548 drevid = drevids[0]
1548 drevid = drevids[0]
1549
1549
1550 for i, rev in enumerate(revs):
1550 for i, rev in enumerate(revs):
1551 old = unfi[rev]
1551 old = unfi[rev]
1552 if not fold:
1552 if not fold:
1553 drevid = drevids[i]
1553 drevid = drevids[i]
1554 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1554 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1555
1555
1556 newdesc = get_amended_desc(drev, old, fold)
1556 newdesc = get_amended_desc(drev, old, fold)
1557 # Make sure commit message contain "Differential Revision"
1557 # Make sure commit message contain "Differential Revision"
1558 if (
1558 if (
1559 old.description() != newdesc
1559 old.description() != newdesc
1560 or old.p1().node() in mapping
1560 or old.p1().node() in mapping
1561 or old.p2().node() in mapping
1561 or old.p2().node() in mapping
1562 ):
1562 ):
1563 if old.phase() == phases.public:
1563 if old.phase() == phases.public:
1564 ui.warn(
1564 ui.warn(
1565 _(b"warning: not updating public commit %s\n")
1565 _(b"warning: not updating public commit %s\n")
1566 % scmutil.formatchangeid(old)
1566 % scmutil.formatchangeid(old)
1567 )
1567 )
1568 continue
1568 continue
1569 parents = [
1569 parents = [
1570 mapping.get(old.p1().node(), (old.p1(),))[0],
1570 mapping.get(old.p1().node(), (old.p1(),))[0],
1571 mapping.get(old.p2().node(), (old.p2(),))[0],
1571 mapping.get(old.p2().node(), (old.p2(),))[0],
1572 ]
1572 ]
1573 newdesc = rewriteutil.update_hash_refs(
1573 newdesc = rewriteutil.update_hash_refs(
1574 repo,
1574 repo,
1575 newdesc,
1575 newdesc,
1576 mapping,
1576 mapping,
1577 )
1577 )
1578 new = context.metadataonlyctx(
1578 new = context.metadataonlyctx(
1579 repo,
1579 repo,
1580 old,
1580 old,
1581 parents=parents,
1581 parents=parents,
1582 text=newdesc,
1582 text=newdesc,
1583 user=old.user(),
1583 user=old.user(),
1584 date=old.date(),
1584 date=old.date(),
1585 extra=old.extra(),
1585 extra=old.extra(),
1586 )
1586 )
1587
1587
1588 newnode = new.commit()
1588 newnode = new.commit()
1589
1589
1590 mapping[old.node()] = [newnode]
1590 mapping[old.node()] = [newnode]
1591
1591
1592 if fold:
1592 if fold:
1593 # Defer updating the (single) Diff until all nodes are
1593 # Defer updating the (single) Diff until all nodes are
1594 # collected. No tags were created, so none need to be
1594 # collected. No tags were created, so none need to be
1595 # removed.
1595 # removed.
1596 newnodes.append(newnode)
1596 newnodes.append(newnode)
1597 continue
1597 continue
1598
1598
1599 _amend_diff_properties(
1599 _amend_diff_properties(
1600 unfi, drevid, [newnode], diffmap[old.node()]
1600 unfi, drevid, [newnode], diffmap[old.node()]
1601 )
1601 )
1602
1602
1603 # Remove local tags since it's no longer necessary
1603 # Remove local tags since it's no longer necessary
1604 tagname = b'D%d' % drevid
1604 tagname = b'D%d' % drevid
1605 if tagname in repo.tags():
1605 if tagname in repo.tags():
1606 tags.tag(
1606 tags.tag(
1607 repo,
1607 repo,
1608 tagname,
1608 tagname,
1609 nullid,
1609 nullid,
1610 message=None,
1610 message=None,
1611 user=None,
1611 user=None,
1612 date=None,
1612 date=None,
1613 local=True,
1613 local=True,
1614 )
1614 )
1615 elif fold:
1615 elif fold:
1616 # When folding multiple commits into one review with
1616 # When folding multiple commits into one review with
1617 # --fold, track even the commits that weren't amended, so
1617 # --fold, track even the commits that weren't amended, so
1618 # that their association isn't lost if the properties are
1618 # that their association isn't lost if the properties are
1619 # rewritten below.
1619 # rewritten below.
1620 newnodes.append(old.node())
1620 newnodes.append(old.node())
1621
1621
1622 # If the submitted commits are public, no amend takes place so
1622 # If the submitted commits are public, no amend takes place so
1623 # there are no newnodes and therefore no diff update to do.
1623 # there are no newnodes and therefore no diff update to do.
1624 if fold and newnodes:
1624 if fold and newnodes:
1625 diff = diffmap[old.node()]
1625 diff = diffmap[old.node()]
1626
1626
1627 # The diff object in diffmap doesn't have the local commits
1627 # The diff object in diffmap doesn't have the local commits
1628 # because that could be returned from differential.creatediff,
1628 # because that could be returned from differential.creatediff,
1629 # not differential.querydiffs. So use the queried diff (if
1629 # not differential.querydiffs. So use the queried diff (if
1630 # present), or force the amend (a new revision is being posted.)
1630 # present), or force the amend (a new revision is being posted.)
1631 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1631 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1632 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1632 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1633 _amend_diff_properties(unfi, drevid, newnodes, diff)
1633 _amend_diff_properties(unfi, drevid, newnodes, diff)
1634 else:
1634 else:
1635 _debug(
1635 _debug(
1636 ui,
1636 ui,
1637 b"local commit list for D%d is already up-to-date\n"
1637 b"local commit list for D%d is already up-to-date\n"
1638 % drevid,
1638 % drevid,
1639 )
1639 )
1640 elif fold:
1640 elif fold:
1641 _debug(ui, b"no newnodes to update\n")
1641 _debug(ui, b"no newnodes to update\n")
1642
1642
1643 # Restack any children of first-time submissions that were orphaned
1643 # Restack any children of first-time submissions that were orphaned
1644 # in the process. The ctx won't report that it is an orphan until
1644 # in the process. The ctx won't report that it is an orphan until
1645 # the cleanup takes place below.
1645 # the cleanup takes place below.
1646 for old in restack:
1646 for old in restack:
1647 parents = [
1647 parents = [
1648 mapping.get(old.p1().node(), (old.p1(),))[0],
1648 mapping.get(old.p1().node(), (old.p1(),))[0],
1649 mapping.get(old.p2().node(), (old.p2(),))[0],
1649 mapping.get(old.p2().node(), (old.p2(),))[0],
1650 ]
1650 ]
1651 new = context.metadataonlyctx(
1651 new = context.metadataonlyctx(
1652 repo,
1652 repo,
1653 old,
1653 old,
1654 parents=parents,
1654 parents=parents,
1655 text=rewriteutil.update_hash_refs(
1655 text=rewriteutil.update_hash_refs(
1656 repo, old.description(), mapping
1656 repo, old.description(), mapping
1657 ),
1657 ),
1658 user=old.user(),
1658 user=old.user(),
1659 date=old.date(),
1659 date=old.date(),
1660 extra=old.extra(),
1660 extra=old.extra(),
1661 )
1661 )
1662
1662
1663 newnode = new.commit()
1663 newnode = new.commit()
1664
1664
1665 # Don't obsolete unselected descendants of nodes that have not
1665 # Don't obsolete unselected descendants of nodes that have not
1666 # been changed in this transaction- that results in an error.
1666 # been changed in this transaction- that results in an error.
1667 if newnode != old.node():
1667 if newnode != old.node():
1668 mapping[old.node()] = [newnode]
1668 mapping[old.node()] = [newnode]
1669 _debug(
1669 _debug(
1670 ui,
1670 ui,
1671 b"restabilizing %s as %s\n"
1671 b"restabilizing %s as %s\n"
1672 % (short(old.node()), short(newnode)),
1672 % (short(old.node()), short(newnode)),
1673 )
1673 )
1674 else:
1674 else:
1675 _debug(
1675 _debug(
1676 ui,
1676 ui,
1677 b"not restabilizing unchanged %s\n" % short(old.node()),
1677 b"not restabilizing unchanged %s\n" % short(old.node()),
1678 )
1678 )
1679
1679
1680 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1680 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1681 if wnode in mapping:
1681 if wnode in mapping:
1682 unfi.setparents(mapping[wnode][0])
1682 unfi.setparents(mapping[wnode][0])
1683
1683
1684
1684
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): the trailing space appears deliberate, presumably to
        # match the "# Parent " header spelling of "hg export" -- confirm
        # before changing.
        (b'parent', b'Parent '),
    ]
)
1696
1696
1697
1697
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets about to be sent and prompt for confirmation

    Each selected revision is printed with either its existing Differential
    Revision id (from ``oldmap``) or "NEW".  Returns True when the user
    answers yes, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)

    for rev in revs:
        ctx = repo[rev]
        # Only the drevid slot of the (oldnode, olddiff, drevid) triple
        # matters for display purposes.
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            label = b'D%d' % drevid
        else:
            label = _(b'NEW')
        drevdesc = ui.label(label, b'phabricator.drev')
        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (drevdesc, summary))

    # promptchoice() returns the index of the chosen answer; 0 is "Yes".
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return ui.promptchoice(prompt) == 0
1723
1723
1724
1724
# Differential Revision status names accepted as bare symbols by the DREVSPEC
# query language, spelled in the normalized form produced by _getstatusname()
# (lowercase, spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1733
1733
1734
1734
1735 def _getstatusname(drev):
1735 def _getstatusname(drev):
1736 """get normalized status name from a Differential Revision"""
1736 """get normalized status name from a Differential Revision"""
1737 return drev[b'statusName'].replace(b' ', b'').lower()
1737 return drev[b'statusName'].replace(b' ', b'').lower()
1738
1738
1739
1739
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Parsing table consumed by parser.parser() in _parse().  Each entry maps a
# token type emitted by _tokenize() to its grammar actions.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1754
1754
1755
1755
1756 def _tokenize(text):
1756 def _tokenize(text):
1757 view = memoryview(text) # zero-copy slice
1757 view = memoryview(text) # zero-copy slice
1758 special = b'():+-& '
1758 special = b'():+-& '
1759 pos = 0
1759 pos = 0
1760 length = len(text)
1760 length = len(text)
1761 while pos < length:
1761 while pos < length:
1762 symbol = b''.join(
1762 symbol = b''.join(
1763 itertools.takewhile(
1763 itertools.takewhile(
1764 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1764 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1765 )
1765 )
1766 )
1766 )
1767 if symbol:
1767 if symbol:
1768 yield (b'symbol', symbol, pos)
1768 yield (b'symbol', symbol, pos)
1769 pos += len(symbol)
1769 pos += len(symbol)
1770 else: # special char, ignore space
1770 else: # special char, ignore space
1771 if text[pos : pos + 1] != b' ':
1771 if text[pos : pos + 1] != b' ':
1772 yield (text[pos : pos + 1], None, pos)
1772 yield (text[pos : pos + 1], None, pos)
1773 pos += 1
1773 pos += 1
1774 yield (b'end', None, pos)
1774 yield (b'end', None, pos)
1775
1775
1776
1776
def _parse(text):
    """parse a DREVSPEC string into a tree

    Raises error.ParseError when the parser stops before consuming the whole
    input (i.e. trailing garbage after a valid prefix).
    """
    p = parser.parser(_elements)
    tree, pos = p.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1782
1782
1783
1783
1784 def _parsedrev(symbol):
1784 def _parsedrev(symbol):
1785 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1785 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1786 if symbol.startswith(b'D') and symbol[1:].isdigit():
1786 if symbol.startswith(b'D') and symbol[1:].isdigit():
1787 return int(symbol[1:])
1787 return int(symbol[1:])
1788 if symbol.isdigit():
1788 if symbol.isdigit():
1789 return int(symbol)
1789 return int(symbol)
1790
1790
1791
1791
1792 def _prefetchdrevs(tree):
1792 def _prefetchdrevs(tree):
1793 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1793 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1794 drevs = set()
1794 drevs = set()
1795 ancestordrevs = set()
1795 ancestordrevs = set()
1796 op = tree[0]
1796 op = tree[0]
1797 if op == b'symbol':
1797 if op == b'symbol':
1798 r = _parsedrev(tree[1])
1798 r = _parsedrev(tree[1])
1799 if r:
1799 if r:
1800 drevs.add(r)
1800 drevs.add(r)
1801 elif op == b'ancestors':
1801 elif op == b'ancestors':
1802 r, a = _prefetchdrevs(tree[1])
1802 r, a = _prefetchdrevs(tree[1])
1803 drevs.update(r)
1803 drevs.update(r)
1804 ancestordrevs.update(r)
1804 ancestordrevs.update(r)
1805 ancestordrevs.update(a)
1805 ancestordrevs.update(a)
1806 else:
1806 else:
1807 for t in tree[1:]:
1807 for t in tree[1:]:
1808 r, a = _prefetchdrevs(t)
1808 r, a = _prefetchdrevs(t)
1809 drevs.update(r)
1809 drevs.update(r)
1810 ancestordrevs.update(a)
1810 ancestordrevs.update(a)
1811 return drevs, ancestordrevs
1811 return drevs, ancestordrevs
1812
1812
1813
1813
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Queries are keyed by the single id or phid they ask for; a hit in
        # ``prefetched`` avoids calling differential.query again.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        # Walk "phabricator:depends-on" edges; ``visited`` guards against
        # dependency cycles.
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # Reverse so the list runs from the bottom of the stack to the top.
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor query,
    # also speculatively fetch up to ``batchsize`` preceding ids, as likely
    # members of the same stack, in a single query.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name selects the subset of already-fetched valid
                # ids whose revision currently carries that status.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Binary set operators map directly onto operator.and_/add/sub.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1939
1939
1940
1940
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Blank sections (e.g. an empty summary) are dropped entirely so they do
    # not leave stray blank paragraphs in the commit message.
    return b'\n\n'.join(s for s in sections if s)
1954
1954
1955
1955
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions would end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    olddesc = ctx.description()
    if _differentialrevisiondescre.search(olddesc):
        # Rewrite the existing DREV line in place.
        return _differentialrevisiondescre.sub(uri, olddesc)

    # No DREV line yet; append one as a new paragraph.
    return b'\n\n'.join([olddesc, uri])
1981
1981
1982
1982
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}
    if len(localcommits) > 1:
        return {bin(hexnode) for hexnode in localcommits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.  An absent node hex yields {None}.
    hexnode = getdiffmeta(diff).get(b'node', b'')
    return {bin(hexnode) or None}
1996
1996
1997
1997
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit entry deterministically by its hex-node key.
            # The previous code sorted the *values*, but those are dicts,
            # which are unorderable on Python 3 -- sorting them raises
            # TypeError whenever more than one commit is recorded.
            commits = props[b'local:commits']
            commit = commits[min(commits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" stores epoch seconds with no zone info,
                # hence the fixed "0" offset (see docstring note).
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2065
2065
2066
2066
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:
        # Parenthesize every spec so they combine safely with ``+``; a
        # truthy ``stack`` prepends ``:`` to select each spec's whole stack.
        if stack:
            wrapped = [b'(:(%s))' % s for s in specs]
        else:
            wrapped = [b'(%s)' % s for s in specs]

        drevs = querydrev(ui, b'+'.join(wrapped))
        if drevs:
            return drevs

    # Either no specs were given or the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
2086
2086
2087
2087
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta properties for every diff in one conduit round trip.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property by emitting
        # "# HG changeset patch" style headers that "import" understands.
        # See patchheadermap and extract in mercurial/patch.py for the set
        # of supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for key in _metanamemap:
            if key in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[key], meta[key]))

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, body)
        patches.append((drev[b'id'], content))

    # Hand the whole batch to the supplied callback.
    write(patches)
2124
2124
2125
2125
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _write(patches):
        # Print every patch body; the drev id itself is not shown.
        ui.write(b''.join(content for _drev, content in patches))

    readpatch(ui, drevs, _write)
2162
2162
2163
2163
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    # Callback handed to readpatch(): applies each patch in order, inside a
    # single lock/transaction so a failure rolls the whole batch back.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain: the freshly imported node becomes the parent
                    # for the next patch in the stack.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2223
2223
2224
2224
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
        (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
    ],
    _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Conduit transaction types that map 1:1 onto command flags.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    revs = opts.get(b'rev')
    if revs:
        if not repo:
            raise error.InputError(_(b'--rev requires a repository'))

        if specs:
            raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))

        # Translate local revisions into their D<number> identifiers.
        drevmap = getdrevmap(repo, scmutil.revrange(repo, [revs]))
        specs = []
        unknown = []
        for rev, drevid in pycompat.iteritems(drevmap):
            if drevid is None:
                unknown.append(repo[rev])
            else:
                specs.append(b'D%d' % drevid)
        if unknown:
            raise error.InputError(
                _(b'selected revisions without a Differential: %s')
                % scmutil.nodesummaries(repo, unknown)
            )

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastindex = len(drevs) - 1
    for index, drev in enumerate(drevs):
        # The comment, if any, only goes on the final revision in the set.
        if index == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2303
2303
2304
2304
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line in the description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )
    # Otherwise fall back to a local D<number> tag, deriving the URL from
    # the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        baseurl = ctx.repo().ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict(
            {
                b'url': baseurl + tag,
                b'id': tag,
            }
        )
    return None
2335
2335
2336
2336
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.

    Returns a hybrid dict with ``url`` and ``status`` keys, or None when the
    changeset has no associated Differential Revision.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    if drevid is None:
        # No Differential is associated with this changeset; skip the
        # otherwise pointless conduit query for {b'ids': [None]}.
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {
                    b'url': drev[b'uri'],
                    b'status': drev[b'statusName'],
                }
            )
    return None
2359
2359
2360
2360
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition local revisions: those with a Differential id (grouped by
    # id, since several local revs may map to the same D<number>) and those
    # without one.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # Single batched conduit query for all ids, then invert the grouping so
    # each local rev can look up its Differential dict directly.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Hook invoked by the displayer for each changeset: prints the revision
    # URI and its color-labeled status on an extra line.
    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revisions without a Differential are dropped from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now