##// END OF EJS Templates
phabricator: pass ui instead of repo to `querydrev()`...
Matt Harbison -
r44906:df805308 default
parent child Browse files
Show More
@@ -1,1799 +1,1799 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # be specifying the version(s) of Mercurial they are tested with, or
95 # be specifying the version(s) of Mercurial they are tested with, or
96 # leave the attribute unspecified.
96 # leave the attribute unspecified.
97 testedwith = b'ships-with-hg-core'
97 testedwith = b'ships-with-hg-core'
98
98
99 eh = exthelper.exthelper()
99 eh = exthelper.exthelper()
100
100
101 cmdtable = eh.cmdtable
101 cmdtable = eh.cmdtable
102 command = eh.command
102 command = eh.command
103 configtable = eh.configtable
103 configtable = eh.configtable
104 templatekeyword = eh.templatekeyword
104 templatekeyword = eh.templatekeyword
105 uisetup = eh.finaluisetup
105 uisetup = eh.finaluisetup
106
106
107 # developer config: phabricator.batchsize
107 # developer config: phabricator.batchsize
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'batchsize', default=12,
109 b'phabricator', b'batchsize', default=12,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'callsign', default=None,
112 b'phabricator', b'callsign', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabricator', b'curlcmd', default=None,
115 b'phabricator', b'curlcmd', default=None,
116 )
116 )
117 # developer config: phabricator.repophid
117 # developer config: phabricator.repophid
118 eh.configitem(
118 eh.configitem(
119 b'phabricator', b'repophid', default=None,
119 b'phabricator', b'repophid', default=None,
120 )
120 )
121 eh.configitem(
121 eh.configitem(
122 b'phabricator', b'url', default=None,
122 b'phabricator', b'url', default=None,
123 )
123 )
124 eh.configitem(
124 eh.configitem(
125 b'phabsend', b'confirm', default=False,
125 b'phabsend', b'confirm', default=False,
126 )
126 )
127
127
128 colortable = {
128 colortable = {
129 b'phabricator.action.created': b'green',
129 b'phabricator.action.created': b'green',
130 b'phabricator.action.skipped': b'magenta',
130 b'phabricator.action.skipped': b'magenta',
131 b'phabricator.action.updated': b'magenta',
131 b'phabricator.action.updated': b'magenta',
132 b'phabricator.desc': b'',
132 b'phabricator.desc': b'',
133 b'phabricator.drev': b'bold',
133 b'phabricator.drev': b'bold',
134 b'phabricator.node': b'',
134 b'phabricator.node': b'',
135 b'phabricator.status.abandoned': b'magenta dim',
135 b'phabricator.status.abandoned': b'magenta dim',
136 b'phabricator.status.accepted': b'green bold',
136 b'phabricator.status.accepted': b'green bold',
137 b'phabricator.status.closed': b'green',
137 b'phabricator.status.closed': b'green',
138 b'phabricator.status.needsreview': b'yellow',
138 b'phabricator.status.needsreview': b'yellow',
139 b'phabricator.status.needsrevision': b'red',
139 b'phabricator.status.needsrevision': b'red',
140 b'phabricator.status.changesplanned': b'red',
140 b'phabricator.status.changesplanned': b'red',
141 }
141 }
142
142
143 _VCR_FLAGS = [
143 _VCR_FLAGS = [
144 (
144 (
145 b'',
145 b'',
146 b'test-vcr',
146 b'test-vcr',
147 b'',
147 b'',
148 _(
148 _(
149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 b', otherwise will mock all http requests using the specified vcr file.'
150 b', otherwise will mock all http requests using the specified vcr file.'
151 b' (ADVANCED)'
151 b' (ADVANCED)'
152 ),
152 ),
153 ),
153 ),
154 ]
154 ]
155
155
156
156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True when ``.arcconfig`` (or the wrapped ``.hg/hgrc`` loader)
    contributed configuration.
    """
    found = False
    params = {}

    def tolocal(value):
        # json.loads only returns unicode strings; convert to local bytes
        if isinstance(value, pycompat.unicode):
            return encoding.unitolocal(value)
        return value

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        params = pycompat.rapply(tolocal, pycompat.json_loads(raw))
        found = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory; nothing to load
        pass

    overrides = util.sortdict()
    if b"repository.callsign" in params:
        overrides[(b"phabricator", b"callsign")] = params[
            b"repository.callsign"
        ]
    if b"phabricator.uri" in params:
        overrides[(b"phabricator", b"url")] = params[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Load .hg/hgrc via the wrapped loader as well
    return orig(ui, wdirvfs, hgvfs, requirements) or found
193
193
194
194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register ``name`` like ``@command``, adding a ``--test-vcr`` flag.

    When a cassette path is supplied, HTTP traffic is recorded to (or
    replayed from) that file via the ``vcr`` library; otherwise the command
    runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def matchrequests(r1, r2):
        # Requests match when URI, method and decoded body parameters agree.
        # JSON-valued parameters are compared structurally so that key
        # ordering inside the payload does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        p1 = util.urlreq.parseqs(r1.body)
        p2 = util.urlreq.parseqs(r2.body)
        for key, values in p1.items():
            if key not in p2:
                return False
            value = values[0]
            other = p2[key][0]
            if value.startswith(b'{') and value.endswith(b'}'):
                if pycompat.json_loads(value) != pycompat.json_loads(other):
                    return False
            elif other != value:
                return False
        return True

    def scrubrequest(request):
        # Replace the conduit API token so cassettes never leak credentials
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def scrubresponse(response):
        # Session cookies are environment-specific noise; drop them
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if not cassette:
                return fn(*args, **kwargs)

            import hgdemandimport

            with hgdemandimport.deactivated():
                # vcr is a test-only dependency; import lazily
                import vcr as vcrmod
                import vcr.stubs as stubs

                recorder = vcrmod.VCR(
                    serializer='json',
                    before_record_request=scrubrequest,
                    before_record_response=scrubresponse,
                    custom_patches=[
                        (
                            urlmod,
                            'httpconnection',
                            stubs.VCRHTTPConnection,
                        ),
                        (
                            urlmod,
                            'httpsconnection',
                            stubs.VCRHTTPSConnection,
                        ),
                    ],
                )
                recorder.register_matcher('hgmatcher', matchrequests)
                with recorder.use_cassette(cassette, match_on=['hgmatcher']):
                    return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271
271
272
272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # PHP form encoding spells booleans as bare words
            value = b'true' if value else b'false'
        # exact-type dispatch on purpose: subclasses (e.g. sortdict) are
        # treated as leaf values, matching the original behavior
        if type(value) is list:
            children = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            # leaf: record it under the accumulated PHP-style key
            flat[prefix] = value
            return
        for key, child in children:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, child)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
298
298
299
299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    match = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if match:
        group, auth = match
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328
328
329
329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # shell out to the user-configured curl command and pipe the body in
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # default path: Mercurial's builtin HTTP machinery
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        # json.loads only returns unicode strings; convert to local bytes
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
373
373
374
374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def fromlocal(x):
        # json.dumps only accepts unicode strings
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398
398
399
399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # remember the answer so later calls skip the conduit round-trip
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
419
419
420
420
421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
422 _differentialrevisiondescre = re.compile(
422 _differentialrevisiondescre = re.compile(
423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
424 )
424 )
425
425
426
426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}

    def drevfromtags(precnodes):
        # Return the number from the first "D123" tag found on a predecessor
        # still present in the repo, or None when no predecessor carries one.
        for prec in precnodes:
            if not has_node(prec):
                continue
            for tag in unfi.nodetags(prec):
                m = _differentialrevisiontagre.match(tag)
                if m:
                    return int(m.group(1))
        return None

    for node in nodelist:
        ctx = unfi[node]
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        # For tags like "D123", put them into "toconfirm" to verify later
        drev = drevfromtags(precnodes)
        if drev is not None:
            toconfirm[node] = (0, set(precnodes), drev)
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    if not toconfirm:
        return result

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    drevs = [drev for force, precs, drev in toconfirm.values()]
    alldiffs = callconduit(
        unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
    )

    def getnode(diff):
        # node recorded in the diff's metadata, or None when absent
        return bin(getdiffmeta(diff).get(b'node', b'')) or None

    for newnode, (force, precset, drev) in toconfirm.items():
        diffs = [d for d in alldiffs.values() if int(d[b'revisionID']) == drev]

        # "precursors" as known by Phabricator
        phprecset = {getnode(d) for d in diffs}

        # Ignore if precursors (Phabricator and local repo) do not overlap,
        # and force is not set (when commit message says nothing)
        if not force and phprecset.isdisjoint(precset):
            tags.tag(
                repo,
                b'D%d' % drev,
                nullid,
                message=None,
                user=None,
                date=None,
                local=True,
            )
            unfi.ui.warn(
                _(
                    b'D%d: local tag removed - does not match '
                    b'Differential history\n'
                )
                % drev
            )
            continue

        # Find the last node using Phabricator metadata, and make sure it
        # exists in the repo
        oldnode = lastdiff = None
        if diffs:
            lastdiff = max(diffs, key=lambda d: int(d[b'id']))
            oldnode = getnode(lastdiff)
            if oldnode and not has_node(oldnode):
                oldnode = None

        result[newnode] = (oldnode, lastdiff, drev)

    return result
520
520
521
521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # Check commit message first
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            mapping[rev] = int(m.group('id'))
            continue
        # Fall back to local "D123" tags
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
543
543
544
544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # patch.diffui yields (chunk, label) pairs; only the raw chunks matter.
    pieces = [
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
        )
    ]
    return b''.join(pieces)
553
553
554
554
class DiffChangeType(object):
    """Integer codes for the kind of change a file underwent in a diff.

    These values are sent as the ``type`` of each change in the
    ``differential.creatediff`` payload.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564
564
565
565
class DiffFileType(object):
    """Integer codes for the content type of a file in a diff.

    Sent as the ``fileType`` of each change; IMAGE/BINARY files are
    uploaded separately rather than inlined as text hunks.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570
570
571
571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are the exact (camelCase) keys the Phabricator API
    expects, so the declaration order and spelling must not change.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585
585
586
586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff.  Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Mirror every ``new:*`` metadata entry under its ``old:*`` key."""
        for key in list(self.metadata.keys()):
            oldkey = key.replace(b'new:', b'old:')
            self.metadata[oldkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626
626
627
627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the path of the file it covers."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654
654
655
655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    # A very large context makes each hunk cover (practically) the whole
    # file, which is how Phabricator wants diffs presented.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, match.exact([fname]), opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # Drop the leading "@@ ..." line; the offsets are sent separately.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684
684
685
685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Fetch the file content once, instead of re-evaluating fctx.data()
    # (the full file body) for every 4MiB chunk in the loop below.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Chunks the server already holds (presumably from an earlier,
            # interrupted upload) are skipped.
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711
711
712
712
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated up front: the server wants the content
            # delivered in chunks.
            uploadchunks(fctx, fphid)
        else:
            # No PHID yet: small enough for a single-request upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748
748
749
749
def addoldbinary(pchange, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()
    if not fctx.cmp(oldfctx):
        # Content is unchanged relative to the parent.  If it's left as
        # IMAGE/BINARY the web UI might try to display it, so downgrade to
        # TEXT and mirror the new-side metadata onto the old side.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
771
771
772
772
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get their own file type so the web UI can render them.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
785
785
786
786
# Copied from mercurial/patch.py
# Maps a filectx flag (b'l' symlink, b'x' executable, b'' regular file) to
# the corresponding git file-mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789
789
790
790
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        # The parent revision's content must round-trip as well, since the
        # old side of the file is sent too.
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
806
806
807
807
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the file up in the parent, since it no longer exists in ctx.
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820
820
821
821
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        # Record a mode flip (e.g. +x) alongside the content change.
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
840
840
841
841
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared in this commit: this is a move.  Claim
                # the source so addremoved() won't emit a plain DELETE.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # The source was already moved once; additional destinations
                # turn it into a MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The away-side records are emitted after all destinations are known.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
905
905
906
906
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
932
932
933
933
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = templatefilters.json(
        {
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }
    )
    localcommits = templatefilters.json(
        {
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'commit': ctx.hex(),
                b'parents': [ctx.p1().hex()],
                b'branch': ctx.branch(),
            },
        }
    )

    # The two properties are written with separate conduit calls, in this
    # order, exactly as before.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {b'diff_id': diffid, b'name': name, b'data': data},
        )
970
970
971
971
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()

    # Decide whether the patch content actually changed since oldnode; if it
    # did not, the existing diff can be reused.
    neednewdiff = True
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1041
1041
1042
1042
def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1058
1058
1059
1059
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line or phabsend.confirm in the config both
    # trigger the interactive confirmation prompt.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" marker syntax.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per changeset: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Remap parents to already-rewritten ancestors so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1268
1268
1269
1269
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' appears intentional to
# match export's header text exactly — confirm against "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1281
1281
1282
1282
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and prompt for confirmation.

    Returns True if the user confirmed sending, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        # Known revisions show their D-number; new ones show "NEW".
        label = b'D%d' % drevid if drevid else _(b'NEW')
        drevdesc = ui.label(label, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns 0 for the first choice (Yes).
    return not ui.promptchoice(prompt)
1310
1310
1311
1311
# Normalized (lowercased, space-free) Differential status names that the
# revision-spec language accepts as bare symbols (see querydrev's walk()).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1320
1320
1321
1321
1322 def _getstatusname(drev):
1322 def _getstatusname(drev):
1323 """get normalized status name from a Differential Revision"""
1323 """get normalized status name from a Differential Revision"""
1324 return drev[b'statusName'].replace(b' ', b'').lower()
1324 return drev[b'statusName'].replace(b' ', b'').lower()
1325
1325
1326
1326
1327 # Small language to specify differential revisions. Support symbols: (), :X,
1327 # Small language to specify differential revisions. Support symbols: (), :X,
1328 # +, and -.
1328 # +, and -.
1329
1329
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1341
1341
1342
1342
1343 def _tokenize(text):
1343 def _tokenize(text):
1344 view = memoryview(text) # zero-copy slice
1344 view = memoryview(text) # zero-copy slice
1345 special = b'():+-& '
1345 special = b'():+-& '
1346 pos = 0
1346 pos = 0
1347 length = len(text)
1347 length = len(text)
1348 while pos < length:
1348 while pos < length:
1349 symbol = b''.join(
1349 symbol = b''.join(
1350 itertools.takewhile(
1350 itertools.takewhile(
1351 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1351 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1352 )
1352 )
1353 )
1353 )
1354 if symbol:
1354 if symbol:
1355 yield (b'symbol', symbol, pos)
1355 yield (b'symbol', symbol, pos)
1356 pos += len(symbol)
1356 pos += len(symbol)
1357 else: # special char, ignore space
1357 else: # special char, ignore space
1358 if text[pos : pos + 1] != b' ':
1358 if text[pos : pos + 1] != b' ':
1359 yield (text[pos : pos + 1], None, pos)
1359 yield (text[pos : pos + 1], None, pos)
1360 pos += 1
1360 pos += 1
1361 yield (b'end', None, pos)
1361 yield (b'end', None, pos)
1362
1362
1363
1363
def _parse(text):
    """Parse a drev spec into an AST, aborting on trailing garbage."""
    parsed, pos = parser.parser(_elements).parse(_tokenize(text))
    # A successful parse must consume the whole input.
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return parsed
1369
1369
1370
1370
1371 def _parsedrev(symbol):
1371 def _parsedrev(symbol):
1372 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1372 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1373 if symbol.startswith(b'D') and symbol[1:].isdigit():
1373 if symbol.startswith(b'D') and symbol[1:].isdigit():
1374 return int(symbol[1:])
1374 return int(symbol[1:])
1375 if symbol.isdigit():
1375 if symbol.isdigit():
1376 return int(symbol)
1376 return int(symbol)
1377
1377
1378
1378
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        # ":X" needs X itself plus everything below it prefetched.
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles |= sub_singles
        ancestors |= sub_singles | sub_ancestors
    else:
        # Operators (and_/add/sub/group): merge results from all operands.
        for subtree in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(subtree)
            singles |= sub_singles
            ancestors |= sub_ancestors
    return singles, ancestors
1399
1399
1400
1400
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Walk the "depends-on" edges breadth-ish from the tops, then
        # reverse so the result is ordered bottom to top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name selects all prefetched revisions in that state.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1526
1526
1527
1527
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped so the message has no blank paragraphs.
    return b'\n\n'.join(p for p in parts if p)
1541
1541
1542
1542
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit deterministically. The previous code sorted
            # the commit dicts themselves, which raises TypeError on
            # Python 3 whenever more than one commit is present (dicts are
            # unorderable); sorting by node-hash key is deterministic and
            # works for any number of commits.
            commits = props[b'local:commits']
            commit = commits[min(commits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                # Rebuild the canonical "Name <email>" user string.
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" carries a bare epoch; time zone is lost,
                # so record UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                # Only the first parent is representable in patch headers.
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fill gaps from the diff object itself, without overriding anything
    # the commit metadata already provided.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1610
1610
1611
1611
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta for every revision's newest diff in a single conduit
    # round-trip instead of one call per revision.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Emit one importable patch per Differential Revision.
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1641
1641
1642
1642
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ":" stack operator.
        spec = b':(%s)' % spec
    readpatch(repo.ui, querydrev(repo.ui, spec), ui.write)
1673
1673
1674
1674
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = querydrev(repo.ui, spec)
    for i, drev in enumerate(drevs):
        # The comment, if any, is attached only to the last revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1711
1711
1712
1712
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision:" line in the changeset
    # description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group('url'), b'id': b"D%s" % match.group('id'),}
        )
    # Fall back to a local "D123"-style tag on the node.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = repo.ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})
    return None
1735
1735
1736
1736
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # No Differential Revision is associated with this changeset.
        return None
    # Look up the live status of the mapped Differential on the server.
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) != drevid:
            continue
        return templateutil.hybriddict(
            {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
        )
    return None
1757
1757
1758
1758
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition the underway revisions into those with a known Differential
    # (indexed both ways) and those without one.
    unknownrevs = []
    drevids = set()
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One conduit call for all known Differentials, then map each drev back
    # onto every local revision it covers.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Displayer hook: print the Differential URL and colorized status
        # under each changeset row.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revisions without a Differential are dropped from the graph entirely.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now