##// END OF EJS Templates
phabricator: make `hg phabread` work outside of a repository...
Matt Harbison -
r44910:5e2d74e5 default
parent child Browse files
Show More
@@ -1,1815 +1,1816 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # be specifying the version(s) of Mercurial they are tested with, or
95 # be specifying the version(s) of Mercurial they are tested with, or
96 # leave the attribute unspecified.
96 # leave the attribute unspecified.
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points so Mercurial's extension
# loader finds them at module level.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color labels used when writing phabsend/phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag added to every command registered through vcrcommand(); used
# by the test suite to record/replay HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155
155
156
156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads the JSON ``.arcconfig`` file from the working directory (if any)
    and maps ``repository.callsign`` / ``phabricator.uri`` keys onto the
    ``[phabricator]`` config section.  Returns True when any config was
    loaded, OR-ed with the result of the wrapped ``loadhgrc``.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig present; nothing to load
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
193
193
194
194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, adding VCR record/replay support.

    A hidden ``--test-vcr PATH`` flag is appended to ``flags``.  When the
    flag is supplied, HTTP requests are recorded to (or replayed from) the
    cassette file at PATH via the third-party ``vcr`` package, so the test
    suite can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: compare URI, method, and form parameters,
        # decoding JSON-looking values so dict key ordering is irrelevant.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub API tokens before the request is written to the cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are environment-specific; drop them from recordings.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr imports poorly under demandimport; deactivate it while
                # pulling in the recording machinery.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271
271
272
272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # Recursively flatten lists/dicts into PHP-style bracketed keys.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            # scalar leaf: record under the accumulated key
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
298
298
299
299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises ``error.Abort`` when ``phabricator.url`` is unset or no
    ``phabtoken`` is configured for that URL.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328
328
329
329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``).  The
    request is sent either through the configured ``phabricator.curlcmd``
    or the builtin urllib-based opener.  Raises ``error.Abort`` when the
    server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # feed the request body to curl via stdin ("-d @-")
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373
373
374
374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398
398
399
399
def getrepophid(repo):
    """given callsign, return repository PHID or None

    Looks up ``phabricator.repophid`` first (developer override / cache),
    then resolves ``phabricator.callsign`` via the
    ``diffusion.repository.search`` Conduit endpoint.  The resolved PHID is
    cached back into the ui config for subsequent calls.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # cache so later calls skip the Conduit round trip
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
419
419
420
420
# Local tags of the form "D123" mark nodes previously sent to Phabricator.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# "Differential Revision: <url>D123" trailer line in a commit message;
# ``url`` captures the full link, ``id`` the numeric revision ID.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425
425
426
426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # remove the stale local tag (tag to nullid)
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520
520
521
521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The commit message trailer is checked first; failing that, local "D123"
    tags on the node are consulted.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result
543
543
544
544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the raw text is wanted here.
    for piece, _label in chunks:
        buf.write(piece)
    return buf.getvalue()
553
553
554
554
class DiffChangeType(object):
    """Constants for the ``type`` field of a Differential change.

    These numeric values are defined by the Phabricator Conduit API and
    must not be altered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564
564
565
565
class DiffFileType(object):
    """Constants for the ``fileType`` field of a Differential change.

    Values are fixed by the Phabricator Conduit API.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570
570
571
571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # NOTE: attribute order defines the positional-argument order of the
    # attrs-generated __init__, so it must not be reordered.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585
585
586
586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # NOTE: attribute order is the attrs __init__ positional order; keep it.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        for newkey in list(self.metadata.keys()):
            oldkey = newkey.replace(b'new:', b'old:')
            self.metadata[oldkey] = self.metadata[newkey]

    def addoldmode(self, value):
        """Record the file's previous unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # The Phab web UI shows these stats and uses them to estimate how
        # large a change a Revision is. Also used in email subjects for the
        # [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626
626
627
627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # NOTE: attribute order is the attrs __init__ positional order; keep it.
    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654
654
655
655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # Use a huge context so the whole file is included in each hunk, as
    # Phabricator expects full-context diffs.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the "@@ ... @@" line; Phab gets the ranges separately.
        corpus = b''.join(lines[1:])
        # Run the header plus this hunk through diffstat to obtain the
        # added/removed line counts that Phabricator displays.
        stathunk = list(header)
        stathunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(stathunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684
684
685
685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the PHID returned by ``file.allocate`` for this file.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Hoist the (potentially very large) file read out of the loop; the
    # previous code called fctx.data() once per outstanding chunk.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this chunk (e.g. resumed upload).
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711
711
712
712
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # file.allocate tells us whether an upload is needed at all (Phab may
    # already have identical content) and whether it must be chunked.
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # Small enough for a single-shot upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748
748
749
749
def addoldbinary(pchange, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()
    if fctx.cmp(oldfctx):
        # Content differs: upload the parent version and record its metadata.
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
    else:
        # Content unchanged (e.g. a mode-only change). If it's left as
        # IMAGE/BINARY the web UI might try to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
771
771
772
772
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            # Images get a dedicated type so the web UI renders them.
            pchange.fileType = DiffFileType.IMAGE
785
785
786
786
# Copied from mercurial/patch.py: maps a filectx flag to the git mode string
# used in diffs/metadata ('l' = symlink, 'x' = executable, '' = regular).
gitmode = {
    b'l': b'120000',
    b'x': b'100755',
    b'': b'100644',
}
789
789
790
790
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        # Also check the parent revision, since the old side of the diff is
        # sent to Phabricator too.
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
806
806
807
807
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary and non-UTF-8 files get no text diff.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820
820
821
821
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    parentctx = ctx.p1()
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[parentctx[fname].flags()]
        # Only record modes when they actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
840
840
841
841
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Track files already recorded as copy/move sources so additional copies
    # of the same source can be marked (move sources get dropped from
    # `removed` so addremoved() doesn't emit a plain delete for them).
    copysources = {}
    movesources = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        newmode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if renamed:
            source = renamed[0]
            oldmode = gitmode[ctx.p1()[source].flags()]
            pchange.oldPath = source

            if source in removed:
                # A move: record the away side and remove the source from
                # `removed` so it isn't reported as a delete.
                movesources[source] = phabchange(
                    currentPath=source,
                    oldPath=source,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                removed.remove(source)
                pchange.type = DiffChangeType.MOVE_HERE
            elif source in movesources:
                # Moved and then also copied: source becomes a MULTICOPY.
                movesources[source].type = DiffChangeType.MULTICOPY
                movesources[source].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if source not in copysources:
                    copysources[source] = phabchange(
                        currentPath=source, type=DiffChangeType.COPY_AWAY
                    )
                copysources[source].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if newmode != oldmode:
                pchange.addoldmode(oldmode)
                pchange.addnewmode(newmode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the away-side changes after all adds have been processed.
    for copiedchange in copysources.values():
        pdiff.addchange(copiedchange)
    for movedchange in movesources.values():
        pdiff.addchange(movedchange)
905
905
906
906
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Build the diff object locally, then send it via the
    # "differential.creatediff" Conduit API.
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded() strips moved files out of `removed`, so it must run before
    # addremoved() or moves would also show up as deletes.
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
932
932
933
933
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    # "hg:meta": enough metadata to reconstruct the changeset exactly.
    hgmeta = templatefilters.json(
        {
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }
    )
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {b'diff_id': diffid, b'name': b'hg:meta', b'data': hgmeta},
    )

    # "local:commits": per-commit details keyed by node, as shown by Phab.
    localcommits = templatefilters.json(
        {
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'commit': ctx.hex(),
                b'parents': [ctx.p1().hex()],
                b'branch': ctx.branch(),
            },
        }
    )
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': localcommits,
        },
    )
970
970
971
971
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()

    # Decide whether a new diff upload is needed at all: if the patch
    # content is identical to the previous submission, skip it.
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even without a new diff upload, the metadata may need refreshing
        # so pushers see the correct node information.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse the commit message server-side and mirror the relevant fields
    # onto the Revision.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for field, value in info[b'fields'].items():
        if field in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': field, b'value': value})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1041
1041
1042
1042
def userphids(ui, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # The API does not treat unknown usernames as an error, so detect any
    # that were not resolved ourselves.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(names) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1058
1058
1059
1059
1060 @vcrcommand(
1060 @vcrcommand(
1061 b'phabsend',
1061 b'phabsend',
1062 [
1062 [
1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1064 (b'', b'amend', True, _(b'update commit messages')),
1064 (b'', b'amend', True, _(b'update commit messages')),
1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1067 (
1067 (
1068 b'm',
1068 b'm',
1069 b'comment',
1069 b'comment',
1070 b'',
1070 b'',
1071 _(b'add a comment to Revisions with new/updated Diffs'),
1071 _(b'add a comment to Revisions with new/updated Diffs'),
1072 ),
1072 ),
1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1074 ],
1074 ],
1075 _(b'REV [OPTIONS]'),
1075 _(b'REV [OPTIONS]'),
1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1077 )
1077 )
1078 def phabsend(ui, repo, *revs, **opts):
1078 def phabsend(ui, repo, *revs, **opts):
1079 """upload changesets to Phabricator
1079 """upload changesets to Phabricator
1080
1080
1081 If there are multiple revisions specified, they will be send as a stack
1081 If there are multiple revisions specified, they will be send as a stack
1082 with a linear dependencies relationship using the order specified by the
1082 with a linear dependencies relationship using the order specified by the
1083 revset.
1083 revset.
1084
1084
1085 For the first time uploading changesets, local tags will be created to
1085 For the first time uploading changesets, local tags will be created to
1086 maintain the association. After the first time, phabsend will check
1086 maintain the association. After the first time, phabsend will check
1087 obsstore and tags information so it can figure out whether to update an
1087 obsstore and tags information so it can figure out whether to update an
1088 existing Differential Revision, or create a new one.
1088 existing Differential Revision, or create a new one.
1089
1089
1090 If --amend is set, update commit messages so they have the
1090 If --amend is set, update commit messages so they have the
1091 ``Differential Revision`` URL, remove related tags. This is similar to what
1091 ``Differential Revision`` URL, remove related tags. This is similar to what
1092 arcanist will do, and is more desired in author-push workflows. Otherwise,
1092 arcanist will do, and is more desired in author-push workflows. Otherwise,
1093 use local tags to record the ``Differential Revision`` association.
1093 use local tags to record the ``Differential Revision`` association.
1094
1094
1095 The --confirm option lets you confirm changesets before sending them. You
1095 The --confirm option lets you confirm changesets before sending them. You
1096 can also add following to your configuration file to make it default
1096 can also add following to your configuration file to make it default
1097 behaviour::
1097 behaviour::
1098
1098
1099 [phabsend]
1099 [phabsend]
1100 confirm = true
1100 confirm = true
1101
1101
1102 phabsend will check obsstore and the above association to decide whether to
1102 phabsend will check obsstore and the above association to decide whether to
1103 update an existing Differential Revision, or create a new one.
1103 update an existing Differential Revision, or create a new one.
1104 """
1104 """
1105 opts = pycompat.byteskwargs(opts)
1105 opts = pycompat.byteskwargs(opts)
1106 revs = list(revs) + opts.get(b'rev', [])
1106 revs = list(revs) + opts.get(b'rev', [])
1107 revs = scmutil.revrange(repo, revs)
1107 revs = scmutil.revrange(repo, revs)
1108 revs.sort() # ascending order to preserve topological parent/child in phab
1108 revs.sort() # ascending order to preserve topological parent/child in phab
1109
1109
1110 if not revs:
1110 if not revs:
1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1112 if opts.get(b'amend'):
1112 if opts.get(b'amend'):
1113 cmdutil.checkunfinished(repo)
1113 cmdutil.checkunfinished(repo)
1114
1114
1115 # {newnode: (oldnode, olddiff, olddrev}
1115 # {newnode: (oldnode, olddiff, olddrev}
1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1117
1117
1118 confirm = ui.configbool(b'phabsend', b'confirm')
1118 confirm = ui.configbool(b'phabsend', b'confirm')
1119 confirm |= bool(opts.get(b'confirm'))
1119 confirm |= bool(opts.get(b'confirm'))
1120 if confirm:
1120 if confirm:
1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1122 if not confirmed:
1122 if not confirmed:
1123 raise error.Abort(_(b'phabsend cancelled'))
1123 raise error.Abort(_(b'phabsend cancelled'))
1124
1124
1125 actions = []
1125 actions = []
1126 reviewers = opts.get(b'reviewer', [])
1126 reviewers = opts.get(b'reviewer', [])
1127 blockers = opts.get(b'blocker', [])
1127 blockers = opts.get(b'blocker', [])
1128 phids = []
1128 phids = []
1129 if reviewers:
1129 if reviewers:
1130 phids.extend(userphids(repo.ui, reviewers))
1130 phids.extend(userphids(repo.ui, reviewers))
1131 if blockers:
1131 if blockers:
1132 phids.extend(
1132 phids.extend(
1133 map(
1133 map(
1134 lambda phid: b'blocking(%s)' % phid,
1134 lambda phid: b'blocking(%s)' % phid,
1135 userphids(repo.ui, blockers),
1135 userphids(repo.ui, blockers),
1136 )
1136 )
1137 )
1137 )
1138 if phids:
1138 if phids:
1139 actions.append({b'type': b'reviewers.add', b'value': phids})
1139 actions.append({b'type': b'reviewers.add', b'value': phids})
1140
1140
1141 drevids = [] # [int]
1141 drevids = [] # [int]
1142 diffmap = {} # {newnode: diff}
1142 diffmap = {} # {newnode: diff}
1143
1143
1144 # Send patches one by one so we know their Differential Revision PHIDs and
1144 # Send patches one by one so we know their Differential Revision PHIDs and
1145 # can provide dependency relationship
1145 # can provide dependency relationship
1146 lastrevphid = None
1146 lastrevphid = None
1147 for rev in revs:
1147 for rev in revs:
1148 ui.debug(b'sending rev %d\n' % rev)
1148 ui.debug(b'sending rev %d\n' % rev)
1149 ctx = repo[rev]
1149 ctx = repo[rev]
1150
1150
1151 # Get Differential Revision ID
1151 # Get Differential Revision ID
1152 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1152 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1153 if oldnode != ctx.node() or opts.get(b'amend'):
1153 if oldnode != ctx.node() or opts.get(b'amend'):
1154 # Create or update Differential Revision
1154 # Create or update Differential Revision
1155 revision, diff = createdifferentialrevision(
1155 revision, diff = createdifferentialrevision(
1156 ctx,
1156 ctx,
1157 revid,
1157 revid,
1158 lastrevphid,
1158 lastrevphid,
1159 oldnode,
1159 oldnode,
1160 olddiff,
1160 olddiff,
1161 actions,
1161 actions,
1162 opts.get(b'comment'),
1162 opts.get(b'comment'),
1163 )
1163 )
1164 diffmap[ctx.node()] = diff
1164 diffmap[ctx.node()] = diff
1165 newrevid = int(revision[b'object'][b'id'])
1165 newrevid = int(revision[b'object'][b'id'])
1166 newrevphid = revision[b'object'][b'phid']
1166 newrevphid = revision[b'object'][b'phid']
1167 if revid:
1167 if revid:
1168 action = b'updated'
1168 action = b'updated'
1169 else:
1169 else:
1170 action = b'created'
1170 action = b'created'
1171
1171
1172 # Create a local tag to note the association, if commit message
1172 # Create a local tag to note the association, if commit message
1173 # does not have it already
1173 # does not have it already
1174 m = _differentialrevisiondescre.search(ctx.description())
1174 m = _differentialrevisiondescre.search(ctx.description())
1175 if not m or int(m.group('id')) != newrevid:
1175 if not m or int(m.group('id')) != newrevid:
1176 tagname = b'D%d' % newrevid
1176 tagname = b'D%d' % newrevid
1177 tags.tag(
1177 tags.tag(
1178 repo,
1178 repo,
1179 tagname,
1179 tagname,
1180 ctx.node(),
1180 ctx.node(),
1181 message=None,
1181 message=None,
1182 user=None,
1182 user=None,
1183 date=None,
1183 date=None,
1184 local=True,
1184 local=True,
1185 )
1185 )
1186 else:
1186 else:
1187 # Nothing changed. But still set "newrevphid" so the next revision
1187 # Nothing changed. But still set "newrevphid" so the next revision
1188 # could depend on this one and "newrevid" for the summary line.
1188 # could depend on this one and "newrevid" for the summary line.
1189 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1189 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1190 newrevid = revid
1190 newrevid = revid
1191 action = b'skipped'
1191 action = b'skipped'
1192
1192
1193 actiondesc = ui.label(
1193 actiondesc = ui.label(
1194 {
1194 {
1195 b'created': _(b'created'),
1195 b'created': _(b'created'),
1196 b'skipped': _(b'skipped'),
1196 b'skipped': _(b'skipped'),
1197 b'updated': _(b'updated'),
1197 b'updated': _(b'updated'),
1198 }[action],
1198 }[action],
1199 b'phabricator.action.%s' % action,
1199 b'phabricator.action.%s' % action,
1200 )
1200 )
1201 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1201 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1202 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1202 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1203 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1203 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1204 ui.write(
1204 ui.write(
1205 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1205 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1206 )
1206 )
1207 drevids.append(newrevid)
1207 drevids.append(newrevid)
1208 lastrevphid = newrevphid
1208 lastrevphid = newrevphid
1209
1209
1210 # Update commit messages and remove tags
1210 # Update commit messages and remove tags
1211 if opts.get(b'amend'):
1211 if opts.get(b'amend'):
1212 unfi = repo.unfiltered()
1212 unfi = repo.unfiltered()
1213 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1213 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1214 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1214 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1215 wnode = unfi[b'.'].node()
1215 wnode = unfi[b'.'].node()
1216 mapping = {} # {oldnode: [newnode]}
1216 mapping = {} # {oldnode: [newnode]}
1217 for i, rev in enumerate(revs):
1217 for i, rev in enumerate(revs):
1218 old = unfi[rev]
1218 old = unfi[rev]
1219 drevid = drevids[i]
1219 drevid = drevids[i]
1220 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1220 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1221 newdesc = getdescfromdrev(drev)
1221 newdesc = getdescfromdrev(drev)
1222 # Make sure commit message contain "Differential Revision"
1222 # Make sure commit message contain "Differential Revision"
1223 if old.description() != newdesc:
1223 if old.description() != newdesc:
1224 if old.phase() == phases.public:
1224 if old.phase() == phases.public:
1225 ui.warn(
1225 ui.warn(
1226 _(b"warning: not updating public commit %s\n")
1226 _(b"warning: not updating public commit %s\n")
1227 % scmutil.formatchangeid(old)
1227 % scmutil.formatchangeid(old)
1228 )
1228 )
1229 continue
1229 continue
1230 parents = [
1230 parents = [
1231 mapping.get(old.p1().node(), (old.p1(),))[0],
1231 mapping.get(old.p1().node(), (old.p1(),))[0],
1232 mapping.get(old.p2().node(), (old.p2(),))[0],
1232 mapping.get(old.p2().node(), (old.p2(),))[0],
1233 ]
1233 ]
1234 new = context.metadataonlyctx(
1234 new = context.metadataonlyctx(
1235 repo,
1235 repo,
1236 old,
1236 old,
1237 parents=parents,
1237 parents=parents,
1238 text=newdesc,
1238 text=newdesc,
1239 user=old.user(),
1239 user=old.user(),
1240 date=old.date(),
1240 date=old.date(),
1241 extra=old.extra(),
1241 extra=old.extra(),
1242 )
1242 )
1243
1243
1244 newnode = new.commit()
1244 newnode = new.commit()
1245
1245
1246 mapping[old.node()] = [newnode]
1246 mapping[old.node()] = [newnode]
1247 # Update diff property
1247 # Update diff property
1248 # If it fails just warn and keep going, otherwise the DREV
1248 # If it fails just warn and keep going, otherwise the DREV
1249 # associations will be lost
1249 # associations will be lost
1250 try:
1250 try:
1251 writediffproperties(unfi[newnode], diffmap[old.node()])
1251 writediffproperties(unfi[newnode], diffmap[old.node()])
1252 except util.urlerr.urlerror:
1252 except util.urlerr.urlerror:
1253 ui.warnnoi18n(
1253 ui.warnnoi18n(
1254 b'Failed to update metadata for D%d\n' % drevid
1254 b'Failed to update metadata for D%d\n' % drevid
1255 )
1255 )
1256 # Remove local tags since it's no longer necessary
1256 # Remove local tags since it's no longer necessary
1257 tagname = b'D%d' % drevid
1257 tagname = b'D%d' % drevid
1258 if tagname in repo.tags():
1258 if tagname in repo.tags():
1259 tags.tag(
1259 tags.tag(
1260 repo,
1260 repo,
1261 tagname,
1261 tagname,
1262 nullid,
1262 nullid,
1263 message=None,
1263 message=None,
1264 user=None,
1264 user=None,
1265 date=None,
1265 date=None,
1266 local=True,
1266 local=True,
1267 )
1267 )
1268 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1268 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1269 if wnode in mapping:
1269 if wnode in mapping:
1270 unfi.setparents(mapping[wnode][0])
1270 unfi.setparents(mapping[wnode][0])
1271
1271
1272
1272
1273 # Map from "hg:meta" keys to header understood by "hg import". The order is
1273 # Map from "hg:meta" keys to header understood by "hg import". The order is
1274 # consistent with "hg export" output.
1274 # consistent with "hg export" output.
1275 _metanamemap = util.sortdict(
1275 _metanamemap = util.sortdict(
1276 [
1276 [
1277 (b'user', b'User'),
1277 (b'user', b'User'),
1278 (b'date', b'Date'),
1278 (b'date', b'Date'),
1279 (b'branch', b'Branch'),
1279 (b'branch', b'Branch'),
1280 (b'node', b'Node ID'),
1280 (b'node', b'Node ID'),
1281 (b'parent', b'Parent '),
1281 (b'parent', b'Parent '),
1282 ]
1282 ]
1283 )
1283 )
1284
1284
1285
1285
1286 def _confirmbeforesend(repo, revs, oldmap):
1286 def _confirmbeforesend(repo, revs, oldmap):
1287 url, token = readurltoken(repo.ui)
1287 url, token = readurltoken(repo.ui)
1288 ui = repo.ui
1288 ui = repo.ui
1289 for rev in revs:
1289 for rev in revs:
1290 ctx = repo[rev]
1290 ctx = repo[rev]
1291 desc = ctx.description().splitlines()[0]
1291 desc = ctx.description().splitlines()[0]
1292 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1292 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1293 if drevid:
1293 if drevid:
1294 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1294 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1295 else:
1295 else:
1296 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1296 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1297
1297
1298 ui.write(
1298 ui.write(
1299 _(b'%s - %s: %s\n')
1299 _(b'%s - %s: %s\n')
1300 % (
1300 % (
1301 drevdesc,
1301 drevdesc,
1302 ui.label(bytes(ctx), b'phabricator.node'),
1302 ui.label(bytes(ctx), b'phabricator.node'),
1303 ui.label(desc, b'phabricator.desc'),
1303 ui.label(desc, b'phabricator.desc'),
1304 )
1304 )
1305 )
1305 )
1306
1306
1307 if ui.promptchoice(
1307 if ui.promptchoice(
1308 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1308 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1309 ):
1309 ):
1310 return False
1310 return False
1311
1311
1312 return True
1312 return True
1313
1313
1314
1314
1315 _knownstatusnames = {
1315 _knownstatusnames = {
1316 b'accepted',
1316 b'accepted',
1317 b'needsreview',
1317 b'needsreview',
1318 b'needsrevision',
1318 b'needsrevision',
1319 b'closed',
1319 b'closed',
1320 b'abandoned',
1320 b'abandoned',
1321 b'changesplanned',
1321 b'changesplanned',
1322 }
1322 }
1323
1323
1324
1324
1325 def _getstatusname(drev):
1325 def _getstatusname(drev):
1326 """get normalized status name from a Differential Revision"""
1326 """get normalized status name from a Differential Revision"""
1327 return drev[b'statusName'].replace(b' ', b'').lower()
1327 return drev[b'statusName'].replace(b' ', b'').lower()
1328
1328
1329
1329
1330 # Small language to specify differential revisions. Support symbols: (), :X,
1330 # Small language to specify differential revisions. Support symbols: (), :X,
1331 # +, and -.
1331 # +, and -.
1332
1332
1333 _elements = {
1333 _elements = {
1334 # token-type: binding-strength, primary, prefix, infix, suffix
1334 # token-type: binding-strength, primary, prefix, infix, suffix
1335 b'(': (12, None, (b'group', 1, b')'), None, None),
1335 b'(': (12, None, (b'group', 1, b')'), None, None),
1336 b':': (8, None, (b'ancestors', 8), None, None),
1336 b':': (8, None, (b'ancestors', 8), None, None),
1337 b'&': (5, None, None, (b'and_', 5), None),
1337 b'&': (5, None, None, (b'and_', 5), None),
1338 b'+': (4, None, None, (b'add', 4), None),
1338 b'+': (4, None, None, (b'add', 4), None),
1339 b'-': (4, None, None, (b'sub', 4), None),
1339 b'-': (4, None, None, (b'sub', 4), None),
1340 b')': (0, None, None, None, None),
1340 b')': (0, None, None, None, None),
1341 b'symbol': (0, b'symbol', None, None, None),
1341 b'symbol': (0, b'symbol', None, None, None),
1342 b'end': (0, None, None, None, None),
1342 b'end': (0, None, None, None, None),
1343 }
1343 }
1344
1344
1345
1345
1346 def _tokenize(text):
1346 def _tokenize(text):
1347 view = memoryview(text) # zero-copy slice
1347 view = memoryview(text) # zero-copy slice
1348 special = b'():+-& '
1348 special = b'():+-& '
1349 pos = 0
1349 pos = 0
1350 length = len(text)
1350 length = len(text)
1351 while pos < length:
1351 while pos < length:
1352 symbol = b''.join(
1352 symbol = b''.join(
1353 itertools.takewhile(
1353 itertools.takewhile(
1354 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1354 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1355 )
1355 )
1356 )
1356 )
1357 if symbol:
1357 if symbol:
1358 yield (b'symbol', symbol, pos)
1358 yield (b'symbol', symbol, pos)
1359 pos += len(symbol)
1359 pos += len(symbol)
1360 else: # special char, ignore space
1360 else: # special char, ignore space
1361 if text[pos : pos + 1] != b' ':
1361 if text[pos : pos + 1] != b' ':
1362 yield (text[pos : pos + 1], None, pos)
1362 yield (text[pos : pos + 1], None, pos)
1363 pos += 1
1363 pos += 1
1364 yield (b'end', None, pos)
1364 yield (b'end', None, pos)
1365
1365
1366
1366
1367 def _parse(text):
1367 def _parse(text):
1368 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1368 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1369 if pos != len(text):
1369 if pos != len(text):
1370 raise error.ParseError(b'invalid token', pos)
1370 raise error.ParseError(b'invalid token', pos)
1371 return tree
1371 return tree
1372
1372
1373
1373
1374 def _parsedrev(symbol):
1374 def _parsedrev(symbol):
1375 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1375 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1376 if symbol.startswith(b'D') and symbol[1:].isdigit():
1376 if symbol.startswith(b'D') and symbol[1:].isdigit():
1377 return int(symbol[1:])
1377 return int(symbol[1:])
1378 if symbol.isdigit():
1378 if symbol.isdigit():
1379 return int(symbol)
1379 return int(symbol)
1380
1380
1381
1381
1382 def _prefetchdrevs(tree):
1382 def _prefetchdrevs(tree):
1383 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1383 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1384 drevs = set()
1384 drevs = set()
1385 ancestordrevs = set()
1385 ancestordrevs = set()
1386 op = tree[0]
1386 op = tree[0]
1387 if op == b'symbol':
1387 if op == b'symbol':
1388 r = _parsedrev(tree[1])
1388 r = _parsedrev(tree[1])
1389 if r:
1389 if r:
1390 drevs.add(r)
1390 drevs.add(r)
1391 elif op == b'ancestors':
1391 elif op == b'ancestors':
1392 r, a = _prefetchdrevs(tree[1])
1392 r, a = _prefetchdrevs(tree[1])
1393 drevs.update(r)
1393 drevs.update(r)
1394 ancestordrevs.update(r)
1394 ancestordrevs.update(r)
1395 ancestordrevs.update(a)
1395 ancestordrevs.update(a)
1396 else:
1396 else:
1397 for t in tree[1:]:
1397 for t in tree[1:]:
1398 r, a = _prefetchdrevs(t)
1398 r, a = _prefetchdrevs(t)
1399 drevs.update(r)
1399 drevs.update(r)
1400 ancestordrevs.update(a)
1400 ancestordrevs.update(a)
1401 return drevs, ancestordrevs
1401 return drevs, ancestordrevs
1402
1402
1403
1403
1404 def querydrev(ui, spec):
1404 def querydrev(ui, spec):
1405 """return a list of "Differential Revision" dicts
1405 """return a list of "Differential Revision" dicts
1406
1406
1407 spec is a string using a simple query language, see docstring in phabread
1407 spec is a string using a simple query language, see docstring in phabread
1408 for details.
1408 for details.
1409
1409
1410 A "Differential Revision dict" looks like:
1410 A "Differential Revision dict" looks like:
1411
1411
1412 {
1412 {
1413 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1413 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1414 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1414 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1415 "auxiliary": {
1415 "auxiliary": {
1416 "phabricator:depends-on": [
1416 "phabricator:depends-on": [
1417 "PHID-DREV-gbapp366kutjebt7agcd"
1417 "PHID-DREV-gbapp366kutjebt7agcd"
1418 ]
1418 ]
1419 "phabricator:projects": [],
1419 "phabricator:projects": [],
1420 },
1420 },
1421 "branch": "default",
1421 "branch": "default",
1422 "ccs": [],
1422 "ccs": [],
1423 "commits": [],
1423 "commits": [],
1424 "dateCreated": "1499181406",
1424 "dateCreated": "1499181406",
1425 "dateModified": "1499182103",
1425 "dateModified": "1499182103",
1426 "diffs": [
1426 "diffs": [
1427 "3",
1427 "3",
1428 "4",
1428 "4",
1429 ],
1429 ],
1430 "hashes": [],
1430 "hashes": [],
1431 "id": "2",
1431 "id": "2",
1432 "lineCount": "2",
1432 "lineCount": "2",
1433 "phid": "PHID-DREV-672qvysjcczopag46qty",
1433 "phid": "PHID-DREV-672qvysjcczopag46qty",
1434 "properties": {},
1434 "properties": {},
1435 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1435 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1436 "reviewers": [],
1436 "reviewers": [],
1437 "sourcePath": null
1437 "sourcePath": null
1438 "status": "0",
1438 "status": "0",
1439 "statusName": "Needs Review",
1439 "statusName": "Needs Review",
1440 "summary": "",
1440 "summary": "",
1441 "testPlan": "",
1441 "testPlan": "",
1442 "title": "example",
1442 "title": "example",
1443 "uri": "https://phab.example.com/D2",
1443 "uri": "https://phab.example.com/D2",
1444 }
1444 }
1445 """
1445 """
1446 # TODO: replace differential.query and differential.querydiffs with
1446 # TODO: replace differential.query and differential.querydiffs with
1447 # differential.diff.search because the former (and their output) are
1447 # differential.diff.search because the former (and their output) are
1448 # frozen, and planned to be deprecated and removed.
1448 # frozen, and planned to be deprecated and removed.
1449
1449
1450 def fetch(params):
1450 def fetch(params):
1451 """params -> single drev or None"""
1451 """params -> single drev or None"""
1452 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1452 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1453 if key in prefetched:
1453 if key in prefetched:
1454 return prefetched[key]
1454 return prefetched[key]
1455 drevs = callconduit(ui, b'differential.query', params)
1455 drevs = callconduit(ui, b'differential.query', params)
1456 # Fill prefetched with the result
1456 # Fill prefetched with the result
1457 for drev in drevs:
1457 for drev in drevs:
1458 prefetched[drev[b'phid']] = drev
1458 prefetched[drev[b'phid']] = drev
1459 prefetched[int(drev[b'id'])] = drev
1459 prefetched[int(drev[b'id'])] = drev
1460 if key not in prefetched:
1460 if key not in prefetched:
1461 raise error.Abort(
1461 raise error.Abort(
1462 _(b'cannot get Differential Revision %r') % params
1462 _(b'cannot get Differential Revision %r') % params
1463 )
1463 )
1464 return prefetched[key]
1464 return prefetched[key]
1465
1465
1466 def getstack(topdrevids):
1466 def getstack(topdrevids):
1467 """given a top, get a stack from the bottom, [id] -> [id]"""
1467 """given a top, get a stack from the bottom, [id] -> [id]"""
1468 visited = set()
1468 visited = set()
1469 result = []
1469 result = []
1470 queue = [{b'ids': [i]} for i in topdrevids]
1470 queue = [{b'ids': [i]} for i in topdrevids]
1471 while queue:
1471 while queue:
1472 params = queue.pop()
1472 params = queue.pop()
1473 drev = fetch(params)
1473 drev = fetch(params)
1474 if drev[b'id'] in visited:
1474 if drev[b'id'] in visited:
1475 continue
1475 continue
1476 visited.add(drev[b'id'])
1476 visited.add(drev[b'id'])
1477 result.append(int(drev[b'id']))
1477 result.append(int(drev[b'id']))
1478 auxiliary = drev.get(b'auxiliary', {})
1478 auxiliary = drev.get(b'auxiliary', {})
1479 depends = auxiliary.get(b'phabricator:depends-on', [])
1479 depends = auxiliary.get(b'phabricator:depends-on', [])
1480 for phid in depends:
1480 for phid in depends:
1481 queue.append({b'phids': [phid]})
1481 queue.append({b'phids': [phid]})
1482 result.reverse()
1482 result.reverse()
1483 return smartset.baseset(result)
1483 return smartset.baseset(result)
1484
1484
1485 # Initialize prefetch cache
1485 # Initialize prefetch cache
1486 prefetched = {} # {id or phid: drev}
1486 prefetched = {} # {id or phid: drev}
1487
1487
1488 tree = _parse(spec)
1488 tree = _parse(spec)
1489 drevs, ancestordrevs = _prefetchdrevs(tree)
1489 drevs, ancestordrevs = _prefetchdrevs(tree)
1490
1490
1491 # developer config: phabricator.batchsize
1491 # developer config: phabricator.batchsize
1492 batchsize = ui.configint(b'phabricator', b'batchsize')
1492 batchsize = ui.configint(b'phabricator', b'batchsize')
1493
1493
1494 # Prefetch Differential Revisions in batch
1494 # Prefetch Differential Revisions in batch
1495 tofetch = set(drevs)
1495 tofetch = set(drevs)
1496 for r in ancestordrevs:
1496 for r in ancestordrevs:
1497 tofetch.update(range(max(1, r - batchsize), r + 1))
1497 tofetch.update(range(max(1, r - batchsize), r + 1))
1498 if drevs:
1498 if drevs:
1499 fetch({b'ids': list(tofetch)})
1499 fetch({b'ids': list(tofetch)})
1500 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1500 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1501
1501
1502 # Walk through the tree, return smartsets
1502 # Walk through the tree, return smartsets
1503 def walk(tree):
1503 def walk(tree):
1504 op = tree[0]
1504 op = tree[0]
1505 if op == b'symbol':
1505 if op == b'symbol':
1506 drev = _parsedrev(tree[1])
1506 drev = _parsedrev(tree[1])
1507 if drev:
1507 if drev:
1508 return smartset.baseset([drev])
1508 return smartset.baseset([drev])
1509 elif tree[1] in _knownstatusnames:
1509 elif tree[1] in _knownstatusnames:
1510 drevs = [
1510 drevs = [
1511 r
1511 r
1512 for r in validids
1512 for r in validids
1513 if _getstatusname(prefetched[r]) == tree[1]
1513 if _getstatusname(prefetched[r]) == tree[1]
1514 ]
1514 ]
1515 return smartset.baseset(drevs)
1515 return smartset.baseset(drevs)
1516 else:
1516 else:
1517 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1517 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1518 elif op in {b'and_', b'add', b'sub'}:
1518 elif op in {b'and_', b'add', b'sub'}:
1519 assert len(tree) == 3
1519 assert len(tree) == 3
1520 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1520 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1521 elif op == b'group':
1521 elif op == b'group':
1522 return walk(tree[1])
1522 return walk(tree[1])
1523 elif op == b'ancestors':
1523 elif op == b'ancestors':
1524 return getstack(walk(tree[1]))
1524 return getstack(walk(tree[1]))
1525 else:
1525 else:
1526 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1526 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1527
1527
1528 return [prefetched[r] for r in walk(tree)]
1528 return [prefetched[r] for r in walk(tree)]
1529
1529
1530
1530
1531 def getdescfromdrev(drev):
1531 def getdescfromdrev(drev):
1532 """get description (commit message) from "Differential Revision"
1532 """get description (commit message) from "Differential Revision"
1533
1533
1534 This is similar to differential.getcommitmessage API. But we only care
1534 This is similar to differential.getcommitmessage API. But we only care
1535 about limited fields: title, summary, test plan, and URL.
1535 about limited fields: title, summary, test plan, and URL.
1536 """
1536 """
1537 title = drev[b'title']
1537 title = drev[b'title']
1538 summary = drev[b'summary'].rstrip()
1538 summary = drev[b'summary'].rstrip()
1539 testplan = drev[b'testPlan'].rstrip()
1539 testplan = drev[b'testPlan'].rstrip()
1540 if testplan:
1540 if testplan:
1541 testplan = b'Test Plan:\n%s' % testplan
1541 testplan = b'Test Plan:\n%s' % testplan
1542 uri = b'Differential Revision: %s' % drev[b'uri']
1542 uri = b'Differential Revision: %s' % drev[b'uri']
1543 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1543 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1544
1544
1545
1545
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Sort by the hash key, not the commit dicts themselves:
            # comparing dicts raises TypeError on Python 3 when more than
            # one commit is present.
            commit = sorted(props[b'local:commits'].items())[0][1]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "time" is a bare epoch second count; time zone is lost,
                # so render it with a +0000 offset.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            # "arc" may store the node under either "commit" or "rev"
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1613
1613
1614
1614
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs: one conduit round-trip for
    # the newest diff of every revision, instead of one call per drev.
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # Only the latest (highest-numbered) diff of a revision is read
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1651
1651
1652
1652
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    # Reading patches only needs conduit access, not a local repository
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ':' (stack/ancestors) spec operator
        spec = b':(%s)' % spec
    # Use ui rather than repo.ui: with optionalrepo=True, repo may be None
    drevs = querydrev(ui, spec)

    def _write(patches):
        # Emit each patch body; the drev id is unused when printing
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1688
1689
1689
1690
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # At most one status-changing flag may be given per invocation
    flags = [
        name
        for name in (b'accept', b'reject', b'abandon', b'reclaim')
        if opts.get(name)
    ]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Translate each flag into a conduit transaction
    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = querydrev(ui, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # Attach the comment (if any) only to the final revision in the list
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1727
1728
1728
1729
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision: <url>" line that
    # phabsend amends into the commit description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # Fallback: look for a local "D123"-style tag on this node and
        # rebuild the URL from the configured phabricator base URL.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    # No Differential association found for this changeset
    return None
1751
1752
1752
1753
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # Changeset has no associated Differential Revision
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    # Pick the query result matching our revision id, if any came back
    match = next(
        (d for d in drevs if int(d[b'id']) == drevid), None
    )
    if match is None:
        return None
    return templateutil.hybriddict(
        {b'url': match[b'uri'], b'status': match[b'statusName'],}
    )
1773
1774
1774
1775
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition revisions: those mapped to a Differential id vs unknown ones.
    # revsbydrevid handles several local revisions sharing one drev.
    unknownrevs, drevids, revsbydrevid = [], set([]), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set([])).add(rev)
        else:
            unknownrevs.append(rev)

    # One batched conduit query for all known Differential ids
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Per-changeset hook: print the drev URL and its colorized status
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revisions we could resolve to a Differential
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now