##// END OF EJS Templates
phabricator: don't infer the old `fctx` in `notutf8()`...
Matt Harbison -
r44913:66a05dbb default
parent child Browse files
Show More
@@ -1,1818 +1,1819 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points so this module's decorators
# and Mercurial's extension loader can find them at the usual names.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup
106
106
# Config knobs for this extension; see the module docstring for the
# user-facing ones ([phabricator] url/callsign/curlcmd).
# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
127
127
# Color/effect labels used when rendering phabricator command output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
142
142
# Extra command-line flag appended by ``vcrcommand`` to every command it
# registers; used by the test suite to record/replay HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155
155
156
156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` (JSON) from the working directory, maps its
    ``repository.callsign`` and ``phabricator.uri`` keys onto the
    ``phabricator.callsign`` / ``phabricator.url`` config items, then
    chains to the wrapped ``loadhgrc``.  Returns True if either this
    function or the original loaded any config.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig file: nothing to load, not an error.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
193
193
194
194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, adding a ``--test-vcr`` flag.

    When ``--test-vcr CASSETTE`` is passed at runtime, all HTTP traffic is
    recorded to (or replayed from) the cassette file via the ``vcr``
    package instead of hitting the network; tokens and cookies are
    scrubbed from the transcript.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Consider two recorded requests equal if URI, method, and decoded
        # body parameters match (JSON payloads compared structurally).
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Replace the conduit API token so it never lands in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Strip session cookies from recorded responses.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                # vcr is an optional test-only dependency; import it lazily
                # and outside of Mercurial's demand-import machinery.
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command normally.
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271
271
272
272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # Python -> PHP form for booleans.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type checks on purpose: subclasses (e.g. sortdict) are
        # treated as leaf values, matching PHP's http_build_query behavior.
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
298
298
299
299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up the matching [auth] group for this URL, if any.
    match = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if match:
        groupname, authconfig = match

        ui.debug(b"using auth.%s.* for authentication\n" % groupname)

        token = authconfig.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328
328
329
329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``).
    The API token from config is injected into ``params``; the request
    goes either through a user-configured curl command or the builtin
    HTTP opener.  Raises ``error.Abort`` on a Conduit-level error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form data to the external curl process via stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert the decoded JSON (unicode strings) back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373
373
374
374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable, human-readable output: sorted keys, 2-space indent.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398
398
399
399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    hits = response[b'data']
    if not hits:
        return None

    phid = hits[0][b'phid']
    # Cache the resolved PHID in config so later calls skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
419
419
420
420
# Local tags of the form "D123" that mark a changeset's Differential Revision.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# "Differential Revision: <url>D123" lines embedded in commit messages.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425
425
426
426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag-derived association must be confirmed
                        # against Phabricator's metadata below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message explicitly claims the drev.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                # Remove the stale local "D<drev>" tag by re-tagging nullid.
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520
520
521
521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    drevmap = {}
    for rev in revs:
        drevmap[rev] = None
        ctx = repo[rev]
        # Prefer the "Differential Revision:" line in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drevmap[rev] = int(descmatch.group('id'))
            continue
        # Otherwise fall back to local "D123" tags on this node.
        for tagname in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tagname)
            if tagmatch:
                drevmap[rev] = int(tagmatch.group(1))
                break

    return drevmap
543
543
544
544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)

    ``ctx`` is diffed against its first parent using ``diffopts``.
    """
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()
553
553
554
554
class DiffChangeType(object):
    """Constants for the Differential ``type`` field of a change.

    Values mirror Phabricator's ArcanistDiffChangeType constants and are
    sent over Conduit verbatim.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564
564
565
565
class DiffFileType(object):
    """Constants for the Differential ``fileType`` field of a change.

    Values mirror Phabricator's ArcanistDiffChangeType file-type constants.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570
570
571
571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Field names must stay camelCase: they are serialized straight into the
    # Conduit API payload.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585
585
586
586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # Field names must stay camelCase: they are serialized straight into the
    # Conduit API payload.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate every ``new:``-prefixed metadata key as an ``old:`` key"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode for this change"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode for this change"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk and fold its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626
626
627
627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Field names must stay camelCase: they are serialized straight into the
    # Conduit API payload.
    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange under its current path"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654
654
655
655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` in ``ctx`` against its first parent with a huge context
    (32767 lines) so each file usually yields a single hunk, then records
    every hunk (with add/del line counts) on ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # diffstatsum gives us the +/- line counts shown in the Phab UI
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684
684
685
685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by ``file.allocate``; chunks already
    marked complete on the server are skipped.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711
711
712
712
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID. Raises ``error.Abort`` if no PHID could be
    obtained after the upload.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # small file: single-shot upload returns the PHID directly
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # large file: server allocated a PHID, send it in chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748
748
749
749
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
773
773
774
774
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the file content to Phabricator and records its PHID, size and
    guessed MIME type on ``pchange``.
    """
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            # Let the web UI render images inline
            pchange.fileType = DiffFileType.IMAGE
787
787
788
788
# Copied from mercurial/patch.py: maps a file's flag string to the git file
# mode Phabricator expects (symlink, executable, regular).
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
791
791
792
792
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Only examines ``fctx`` itself; callers that also care about the previous
    version of the file must call this on the old fctx explicitly (the old
    fctx is no longer inferred here).
    """
    try:
        fctx.data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
808
806
809
807
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        oldfctx = ctx.p1()[fname]
        # Only removed *text* content gets a textual hunk; binary or
        # non-UTF-8 files are recorded without one.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
822
820
823
821
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # notutf8() no longer inspects the parent itself, so check both the
        # new and the old version here: either side being non-UTF-8 forces
        # the whole change to be treated as binary.
        if fctx.isbinary() or notutf8(fctx) or notutf8(oldfctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
842
841
843
842
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        # The old fctx only exists for renames/copies; brand-new files have
        # no previous version to check for UTF-8-ness.
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Already recorded as a move; further copies make it MULTICOPY
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Either side being binary or non-UTF-8 forces binary handling; the
        # old side is only consulted when it exists (renames/copies).
        if fctx.isbinary() or notutf8(fctx) or (oldfctx and notutf8(oldfctx)):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
907
908
908
909
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff for ``ctx`` against its first parent and sends it via
    the ``differential.creatediff`` Conduit API. Returns the server's diff
    object; raises ``error.Abort`` on an empty response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
934
935
935
936
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches two properties to the diff via Conduit: ``hg:meta`` (user, date,
    branch, node, parent) and ``local:commits`` (per-node commit details).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
972
973
973
974
974 def createdifferentialrevision(
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair; raises ``error.Abort`` if the
    Conduit edit call returns nothing.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        # Compare raw patch text to decide whether a new diff upload is needed
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1043
1044
1044
1045
def userphids(ui, names):
    """convert user names to PHIDs

    Raises ``error.Abort`` listing any names the server could not resolve.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1060
1061
1061
1062
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        # amending rewrites commits, which is unsafe mid-rebase/graft/etc.
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # command-line --confirm ORs with the [phabsend] confirm config knob
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # extra Conduit transactions (reviewers) applied to every revision sent
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # blocking reviewers use Phabricator's "blocking(PHID)" syntax
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # one status line per changeset: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # rebase onto already-amended parents when applicable
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1273
1274
1274
1275
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # the trailing space matches "hg export"'s "# Parent " header prefix
        (b'parent', b'Parent '),
    ]
)
1286
1287
1287
1288
def _confirmbeforesend(repo, revs, oldmap):
    """Show a one-line summary per changeset and prompt before sending.

    Returns True when the user answers Yes, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # existing association shows "D123"; otherwise mark it as NEW
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns 0 for Yes, 1 for No
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1315
1316
1316
1317
# Normalized Differential Revision status names accepted as symbols by the
# drev query language (compared against _getstatusname() output).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1325
1326
1326
1327
1327 def _getstatusname(drev):
1328 def _getstatusname(drev):
1328 """get normalized status name from a Differential Revision"""
1329 """get normalized status name from a Differential Revision"""
1329 return drev[b'statusName'].replace(b' ', b'').lower()
1330 return drev[b'statusName'].replace(b' ', b'').lower()
1330
1331
1331
1332
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1346
1347
1347
1348
1348 def _tokenize(text):
1349 def _tokenize(text):
1349 view = memoryview(text) # zero-copy slice
1350 view = memoryview(text) # zero-copy slice
1350 special = b'():+-& '
1351 special = b'():+-& '
1351 pos = 0
1352 pos = 0
1352 length = len(text)
1353 length = len(text)
1353 while pos < length:
1354 while pos < length:
1354 symbol = b''.join(
1355 symbol = b''.join(
1355 itertools.takewhile(
1356 itertools.takewhile(
1356 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1357 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1357 )
1358 )
1358 )
1359 )
1359 if symbol:
1360 if symbol:
1360 yield (b'symbol', symbol, pos)
1361 yield (b'symbol', symbol, pos)
1361 pos += len(symbol)
1362 pos += len(symbol)
1362 else: # special char, ignore space
1363 else: # special char, ignore space
1363 if text[pos : pos + 1] != b' ':
1364 if text[pos : pos + 1] != b' ':
1364 yield (text[pos : pos + 1], None, pos)
1365 yield (text[pos : pos + 1], None, pos)
1365 pos += 1
1366 pos += 1
1366 yield (b'end', None, pos)
1367 yield (b'end', None, pos)
1367
1368
1368
1369
def _parse(text):
    """Parse a drev spec into a tree using Mercurial's generic parser.

    Raises ParseError when input remains after a complete expression.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1374
1375
1375
1376
1376 def _parsedrev(symbol):
1377 def _parsedrev(symbol):
1377 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1378 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1378 if symbol.startswith(b'D') and symbol[1:].isdigit():
1379 if symbol.startswith(b'D') and symbol[1:].isdigit():
1379 return int(symbol[1:])
1380 return int(symbol[1:])
1380 if symbol.isdigit():
1381 if symbol.isdigit():
1381 return int(symbol)
1382 return int(symbol)
1382
1383
1383
1384
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Walks a parsed drev-spec tree; ids that appear under an ``ancestors``
    (``:X``) operator are reported in both sets.
    """
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles |= sub_singles
        # the operand's ids need ancestor-style prefetching as well
        ancestors |= sub_singles
        ancestors |= sub_ancestors
    else:
        # binary operators (and_/add/sub) and groups: recurse into operands
        for subtree in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(subtree)
            singles |= sub_singles
            ancestors |= sub_ancestors
    return singles, ancestors
1404
1405
1405
1406
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # a query is keyed by its first id or phid; reuse cached results
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # depth-first walk of "phabricator:depends-on" links, deduplicated,
        # then reversed so the bottom of the stack comes first
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # heuristically fetch a window of ids below each ancestor top so the
        # stack walk in getstack() mostly hits the cache
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status-name symbols select prefetched revisions by status
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # smartsets implement and_/add/sub as set operations
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1531
1532
1532
1533
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # drop empty sections so we don't emit doubled blank lines
    return b'\n\n'.join(part for part in parts if part)
1546
1547
1547
1548
1548 def getdiffmeta(diff):
1549 def getdiffmeta(diff):
1549 """get commit metadata (date, node, user, p1) from a diff object
1550 """get commit metadata (date, node, user, p1) from a diff object
1550
1551
1551 The metadata could be "hg:meta", sent by phabsend, like:
1552 The metadata could be "hg:meta", sent by phabsend, like:
1552
1553
1553 "properties": {
1554 "properties": {
1554 "hg:meta": {
1555 "hg:meta": {
1555 "branch": "default",
1556 "branch": "default",
1556 "date": "1499571514 25200",
1557 "date": "1499571514 25200",
1557 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1558 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1558 "user": "Foo Bar <foo@example.com>",
1559 "user": "Foo Bar <foo@example.com>",
1559 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1560 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1560 }
1561 }
1561 }
1562 }
1562
1563
1563 Or converted from "local:commits", sent by "arc", like:
1564 Or converted from "local:commits", sent by "arc", like:
1564
1565
1565 "properties": {
1566 "properties": {
1566 "local:commits": {
1567 "local:commits": {
1567 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1568 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1568 "author": "Foo Bar",
1569 "author": "Foo Bar",
1569 "authorEmail": "foo@example.com"
1570 "authorEmail": "foo@example.com"
1570 "branch": "default",
1571 "branch": "default",
1571 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1572 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1572 "local": "1000",
1573 "local": "1000",
1573 "message": "...",
1574 "message": "...",
1574 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1575 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1575 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1576 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1576 "summary": "...",
1577 "summary": "...",
1577 "tag": "",
1578 "tag": "",
1578 "time": 1499546314,
1579 "time": 1499546314,
1579 }
1580 }
1580 }
1581 }
1581 }
1582 }
1582
1583
1583 Note: metadata extracted from "local:commits" will lose time zone
1584 Note: metadata extracted from "local:commits" will lose time zone
1584 information.
1585 information.
1585 """
1586 """
1586 props = diff.get(b'properties') or {}
1587 props = diff.get(b'properties') or {}
1587 meta = props.get(b'hg:meta')
1588 meta = props.get(b'hg:meta')
1588 if not meta:
1589 if not meta:
1589 if props.get(b'local:commits'):
1590 if props.get(b'local:commits'):
1590 commit = sorted(props[b'local:commits'].values())[0]
1591 commit = sorted(props[b'local:commits'].values())[0]
1591 meta = {}
1592 meta = {}
1592 if b'author' in commit and b'authorEmail' in commit:
1593 if b'author' in commit and b'authorEmail' in commit:
1593 meta[b'user'] = b'%s <%s>' % (
1594 meta[b'user'] = b'%s <%s>' % (
1594 commit[b'author'],
1595 commit[b'author'],
1595 commit[b'authorEmail'],
1596 commit[b'authorEmail'],
1596 )
1597 )
1597 if b'time' in commit:
1598 if b'time' in commit:
1598 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1599 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1599 if b'branch' in commit:
1600 if b'branch' in commit:
1600 meta[b'branch'] = commit[b'branch']
1601 meta[b'branch'] = commit[b'branch']
1601 node = commit.get(b'commit', commit.get(b'rev'))
1602 node = commit.get(b'commit', commit.get(b'rev'))
1602 if node:
1603 if node:
1603 meta[b'node'] = node
1604 meta[b'node'] = node
1604 if len(commit.get(b'parents', ())) >= 1:
1605 if len(commit.get(b'parents', ())) >= 1:
1605 meta[b'parent'] = commit[b'parents'][0]
1606 meta[b'parent'] = commit[b'parents'][0]
1606 else:
1607 else:
1607 meta = {}
1608 meta = {}
1608 if b'date' not in meta and b'dateCreated' in diff:
1609 if b'date' not in meta and b'dateCreated' in diff:
1609 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1610 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1610 if b'branch' not in meta and diff.get(b'branch'):
1611 if b'branch' not in meta and diff.get(b'branch'):
1611 meta[b'branch'] = diff[b'branch']
1612 meta[b'branch'] = diff[b'branch']
1612 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1613 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1613 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1614 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1614 return meta
1615 return meta
1615
1616
1616
1617
1617 def readpatch(ui, drevs, write):
1618 def readpatch(ui, drevs, write):
1618 """generate plain-text patch readable by 'hg import'
1619 """generate plain-text patch readable by 'hg import'
1619
1620
1620 write takes a list of (DREV, bytes), where DREV is the differential number
1621 write takes a list of (DREV, bytes), where DREV is the differential number
1621 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1622 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1622 to be imported. drevs is what "querydrev" returns, results of
1623 to be imported. drevs is what "querydrev" returns, results of
1623 "differential.query".
1624 "differential.query".
1624 """
1625 """
1625 # Prefetch hg:meta property for all diffs
1626 # Prefetch hg:meta property for all diffs
1626 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1627 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1627 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1628 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1628
1629
1629 patches = []
1630 patches = []
1630
1631
1631 # Generate patch for each drev
1632 # Generate patch for each drev
1632 for drev in drevs:
1633 for drev in drevs:
1633 ui.note(_(b'reading D%s\n') % drev[b'id'])
1634 ui.note(_(b'reading D%s\n') % drev[b'id'])
1634
1635
1635 diffid = max(int(v) for v in drev[b'diffs'])
1636 diffid = max(int(v) for v in drev[b'diffs'])
1636 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1637 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1637 desc = getdescfromdrev(drev)
1638 desc = getdescfromdrev(drev)
1638 header = b'# HG changeset patch\n'
1639 header = b'# HG changeset patch\n'
1639
1640
1640 # Try to preserve metadata from hg:meta property. Write hg patch
1641 # Try to preserve metadata from hg:meta property. Write hg patch
1641 # headers that can be read by the "import" command. See patchheadermap
1642 # headers that can be read by the "import" command. See patchheadermap
1642 # and extract in mercurial/patch.py for supported headers.
1643 # and extract in mercurial/patch.py for supported headers.
1643 meta = getdiffmeta(diffs[b'%d' % diffid])
1644 meta = getdiffmeta(diffs[b'%d' % diffid])
1644 for k in _metanamemap.keys():
1645 for k in _metanamemap.keys():
1645 if k in meta:
1646 if k in meta:
1646 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1647 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1647
1648
1648 content = b'%s%s\n%s' % (header, desc, body)
1649 content = b'%s%s\n%s' % (header, desc, body)
1649 patches.append((drev[b'id'], content))
1650 patches.append((drev[b'id'], content))
1650
1651
1651 # Write patches to the supplied callback
1652 # Write patches to the supplied callback
1652 write(patches)
1653 write(patches)
1653
1654
1654
1655
1655 @vcrcommand(
1656 @vcrcommand(
1656 b'phabread',
1657 b'phabread',
1657 [(b'', b'stack', False, _(b'read dependencies'))],
1658 [(b'', b'stack', False, _(b'read dependencies'))],
1658 _(b'DREVSPEC [OPTIONS]'),
1659 _(b'DREVSPEC [OPTIONS]'),
1659 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1660 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1660 optionalrepo=True,
1661 optionalrepo=True,
1661 )
1662 )
1662 def phabread(ui, repo, spec, **opts):
1663 def phabread(ui, repo, spec, **opts):
1663 """print patches from Phabricator suitable for importing
1664 """print patches from Phabricator suitable for importing
1664
1665
1665 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1666 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1666 the number ``123``. It could also have common operators like ``+``, ``-``,
1667 the number ``123``. It could also have common operators like ``+``, ``-``,
1667 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1668 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1668 select a stack.
1669 select a stack.
1669
1670
1670 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1671 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1671 could be used to filter patches by status. For performance reason, they
1672 could be used to filter patches by status. For performance reason, they
1672 only represent a subset of non-status selections and cannot be used alone.
1673 only represent a subset of non-status selections and cannot be used alone.
1673
1674
1674 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1675 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1675 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1676 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1676 stack up to D9.
1677 stack up to D9.
1677
1678
1678 If --stack is given, follow dependencies information and read all patches.
1679 If --stack is given, follow dependencies information and read all patches.
1679 It is equivalent to the ``:`` operator.
1680 It is equivalent to the ``:`` operator.
1680 """
1681 """
1681 opts = pycompat.byteskwargs(opts)
1682 opts = pycompat.byteskwargs(opts)
1682 if opts.get(b'stack'):
1683 if opts.get(b'stack'):
1683 spec = b':(%s)' % spec
1684 spec = b':(%s)' % spec
1684 drevs = querydrev(ui, spec)
1685 drevs = querydrev(ui, spec)
1685
1686
1686 def _write(patches):
1687 def _write(patches):
1687 for drev, content in patches:
1688 for drev, content in patches:
1688 ui.write(content)
1689 ui.write(content)
1689
1690
1690 readpatch(ui, drevs, _write)
1691 readpatch(ui, drevs, _write)
1691
1692
1692
1693
1693 @vcrcommand(
1694 @vcrcommand(
1694 b'phabupdate',
1695 b'phabupdate',
1695 [
1696 [
1696 (b'', b'accept', False, _(b'accept revisions')),
1697 (b'', b'accept', False, _(b'accept revisions')),
1697 (b'', b'reject', False, _(b'reject revisions')),
1698 (b'', b'reject', False, _(b'reject revisions')),
1698 (b'', b'abandon', False, _(b'abandon revisions')),
1699 (b'', b'abandon', False, _(b'abandon revisions')),
1699 (b'', b'reclaim', False, _(b'reclaim revisions')),
1700 (b'', b'reclaim', False, _(b'reclaim revisions')),
1700 (b'm', b'comment', b'', _(b'comment on the last revision')),
1701 (b'm', b'comment', b'', _(b'comment on the last revision')),
1701 ],
1702 ],
1702 _(b'DREVSPEC [OPTIONS]'),
1703 _(b'DREVSPEC [OPTIONS]'),
1703 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1704 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1704 optionalrepo=True,
1705 optionalrepo=True,
1705 )
1706 )
1706 def phabupdate(ui, repo, spec, **opts):
1707 def phabupdate(ui, repo, spec, **opts):
1707 """update Differential Revision in batch
1708 """update Differential Revision in batch
1708
1709
1709 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1710 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1710 """
1711 """
1711 opts = pycompat.byteskwargs(opts)
1712 opts = pycompat.byteskwargs(opts)
1712 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1713 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1713 if len(flags) > 1:
1714 if len(flags) > 1:
1714 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1715 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1715
1716
1716 actions = []
1717 actions = []
1717 for f in flags:
1718 for f in flags:
1718 actions.append({b'type': f, b'value': True})
1719 actions.append({b'type': f, b'value': True})
1719
1720
1720 drevs = querydrev(ui, spec)
1721 drevs = querydrev(ui, spec)
1721 for i, drev in enumerate(drevs):
1722 for i, drev in enumerate(drevs):
1722 if i + 1 == len(drevs) and opts.get(b'comment'):
1723 if i + 1 == len(drevs) and opts.get(b'comment'):
1723 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1724 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1724 if actions:
1725 if actions:
1725 params = {
1726 params = {
1726 b'objectIdentifier': drev[b'phid'],
1727 b'objectIdentifier': drev[b'phid'],
1727 b'transactions': actions,
1728 b'transactions': actions,
1728 }
1729 }
1729 callconduit(ui, b'differential.revision.edit', params)
1730 callconduit(ui, b'differential.revision.edit', params)
1730
1731
1731
1732
1732 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1733 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1733 def template_review(context, mapping):
1734 def template_review(context, mapping):
1734 """:phabreview: Object describing the review for this changeset.
1735 """:phabreview: Object describing the review for this changeset.
1735 Has attributes `url` and `id`.
1736 Has attributes `url` and `id`.
1736 """
1737 """
1737 ctx = context.resource(mapping, b'ctx')
1738 ctx = context.resource(mapping, b'ctx')
1738 m = _differentialrevisiondescre.search(ctx.description())
1739 m = _differentialrevisiondescre.search(ctx.description())
1739 if m:
1740 if m:
1740 return templateutil.hybriddict(
1741 return templateutil.hybriddict(
1741 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1742 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1742 )
1743 )
1743 else:
1744 else:
1744 tags = ctx.repo().nodetags(ctx.node())
1745 tags = ctx.repo().nodetags(ctx.node())
1745 for t in tags:
1746 for t in tags:
1746 if _differentialrevisiontagre.match(t):
1747 if _differentialrevisiontagre.match(t):
1747 url = ctx.repo().ui.config(b'phabricator', b'url')
1748 url = ctx.repo().ui.config(b'phabricator', b'url')
1748 if not url.endswith(b'/'):
1749 if not url.endswith(b'/'):
1749 url += b'/'
1750 url += b'/'
1750 url += t
1751 url += t
1751
1752
1752 return templateutil.hybriddict({b'url': url, b'id': t,})
1753 return templateutil.hybriddict({b'url': url, b'id': t,})
1753 return None
1754 return None
1754
1755
1755
1756
1756 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1757 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1757 def template_status(context, mapping):
1758 def template_status(context, mapping):
1758 """:phabstatus: String. Status of Phabricator differential.
1759 """:phabstatus: String. Status of Phabricator differential.
1759 """
1760 """
1760 ctx = context.resource(mapping, b'ctx')
1761 ctx = context.resource(mapping, b'ctx')
1761 repo = context.resource(mapping, b'repo')
1762 repo = context.resource(mapping, b'repo')
1762 ui = context.resource(mapping, b'ui')
1763 ui = context.resource(mapping, b'ui')
1763
1764
1764 rev = ctx.rev()
1765 rev = ctx.rev()
1765 try:
1766 try:
1766 drevid = getdrevmap(repo, [rev])[rev]
1767 drevid = getdrevmap(repo, [rev])[rev]
1767 except KeyError:
1768 except KeyError:
1768 return None
1769 return None
1769 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1770 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1770 for drev in drevs:
1771 for drev in drevs:
1771 if int(drev[b'id']) == drevid:
1772 if int(drev[b'id']) == drevid:
1772 return templateutil.hybriddict(
1773 return templateutil.hybriddict(
1773 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1774 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1774 )
1775 )
1775 return None
1776 return None
1776
1777
1777
1778
1778 @show.showview(b'phabstatus', csettopic=b'work')
1779 @show.showview(b'phabstatus', csettopic=b'work')
1779 def phabstatusshowview(ui, repo, displayer):
1780 def phabstatusshowview(ui, repo, displayer):
1780 """Phabricator differiential status"""
1781 """Phabricator differiential status"""
1781 revs = repo.revs('sort(_underway(), topo)')
1782 revs = repo.revs('sort(_underway(), topo)')
1782 drevmap = getdrevmap(repo, revs)
1783 drevmap = getdrevmap(repo, revs)
1783 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1784 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1784 for rev, drevid in pycompat.iteritems(drevmap):
1785 for rev, drevid in pycompat.iteritems(drevmap):
1785 if drevid is not None:
1786 if drevid is not None:
1786 drevids.add(drevid)
1787 drevids.add(drevid)
1787 revsbydrevid.setdefault(drevid, set([])).add(rev)
1788 revsbydrevid.setdefault(drevid, set([])).add(rev)
1788 else:
1789 else:
1789 unknownrevs.append(rev)
1790 unknownrevs.append(rev)
1790
1791
1791 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1792 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1792 drevsbyrev = {}
1793 drevsbyrev = {}
1793 for drev in drevs:
1794 for drev in drevs:
1794 for rev in revsbydrevid[int(drev[b'id'])]:
1795 for rev in revsbydrevid[int(drev[b'id'])]:
1795 drevsbyrev[rev] = drev
1796 drevsbyrev[rev] = drev
1796
1797
1797 def phabstatus(ctx):
1798 def phabstatus(ctx):
1798 drev = drevsbyrev[ctx.rev()]
1799 drev = drevsbyrev[ctx.rev()]
1799 status = ui.label(
1800 status = ui.label(
1800 b'%(statusName)s' % drev,
1801 b'%(statusName)s' % drev,
1801 b'phabricator.status.%s' % _getstatusname(drev),
1802 b'phabricator.status.%s' % _getstatusname(drev),
1802 )
1803 )
1803 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1804 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1804
1805
1805 revs -= smartset.baseset(unknownrevs)
1806 revs -= smartset.baseset(unknownrevs)
1806 revdag = graphmod.dagwalker(repo, revs)
1807 revdag = graphmod.dagwalker(repo, revs)
1807
1808
1808 ui.setconfig(b'experimental', b'graphshorten', True)
1809 ui.setconfig(b'experimental', b'graphshorten', True)
1809 displayer._exthook = phabstatus
1810 displayer._exthook = phabstatus
1810 nodelen = show.longestshortest(repo, revs)
1811 nodelen = show.longestshortest(repo, revs)
1811 logcmdutil.displaygraph(
1812 logcmdutil.displaygraph(
1812 ui,
1813 ui,
1813 repo,
1814 repo,
1814 revdag,
1815 revdag,
1815 displayer,
1816 displayer,
1816 graphmod.asciiedges,
1817 graphmod.asciiedges,
1817 props={b'nodelen': nodelen},
1818 props={b'nodelen': nodelen},
1818 )
1819 )
General Comments 0
You need to be logged in to leave comments. Login now