##// END OF EJS Templates
phabricator: pass old `fctx` to `addoldbinary()` instead of inferring it...
Matt Harbison -
r44911:98f7b9cf default
parent child Browse files
Show More
@@ -1,1816 +1,1818 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# A single exthelper instance collects every command, config item and
# template keyword this extension registers.
eh = exthelper.exthelper()

# Re-export the exthelper registration points under the names Mercurial's
# extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
127
127
# Color/effect definitions for the ui.label() names used by this extension
# (picked up by the color extension when rendering output).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
142
142
# Hidden testing flag appended to every command registered through
# vcrcommand(); see vcrcommand() for how the cassette file is used.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155
155
156
156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True when either ``.arcconfig`` or the wrapped ``loadhgrc``
    contributed configuration.
    """
    found = False
    params = {}

    try:
        # json.loads only accepts str until Python 3.6, so decode first ...
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        decoded = pycompat.json_loads(raw)
        # ... and convert the unicode strings json produces back to bytes.
        tolocal = (
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x
        )
        params = pycompat.rapply(tolocal, decoded)
        found = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig present; nothing to load.
        pass

    # Translate the .arcconfig keys we understand into the equivalent
    # [phabricator] config entries.
    overrides = util.sortdict()
    for arckey, cfgkey in [
        (b"repository.callsign", b"callsign"),
        (b"phabricator.uri", b"url"),
    ]:
        if arckey in params:
            overrides[(b"phabricator", cfgkey)] = params[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or found  # Load .hg/hgrc
193
193
194
194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed via vcr.

    Works like ``@command`` but appends the hidden ``--test-vcr PATH`` flag
    (see ``_VCR_FLAGS``).  When the flag is given, conduit HTTP requests are
    recorded to PATH, or replayed from it if the cassette already exists, so
    tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match recorded requests by URI/method, then by decoded body
        # parameters so key ordering inside JSON payloads does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub API tokens before they are written into a cassette file.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies from recordings as well.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # The flag default is b'', so fsdecode yields '' when unset.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's own imports misbehave under demandimport.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's url module so its connections go
                        # through vcr's recording stubs.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/doc generation.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271
271
272
272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # Python bool -> PHP form
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # NOTE: exact type checks on purpose — subclasses of list/dict are
        # treated as scalars, matching the original dispatch-by-type table.
        if type(value) is list:
            children = [(b'%d' % i, v) for i, v in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, child in children:
            childprefix = b'%s[%s]' % (prefix, key) if prefix else key
            flatten(childprefix, child)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
298
298
299
299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    baseurl = ui.config(b'phabricator', b'url')
    if not baseurl:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up the [auth] group matching the configured URL.
    authmatch = httpconnectionmod.readauthforuri(ui, baseurl, util.url(baseurl).user)
    if authmatch is not None:
        groupname, authcfg = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authcfg.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (baseurl,)
        )

    return baseurl, token
328
328
329
329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the conduit method (e.g. ``differential.querydiffs``).
    Raises error.Abort when the server reports an ``error_code``.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Do not mutate the caller's dict when injecting the token.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    # Conduit expects the parameters JSON-encoded inside a form body.
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the form body on stdin (-d @-).
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default: Mercurial's urllib-based opener (honors [auth] settings).
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Decode the JSON response and convert unicode strings back to bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373
373
374
374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable key order + indentation makes the output diffable in tests.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398
398
399
399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # Resolve the callsign through Conduit, then cache the answer in the
    # config for the rest of this process.
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    hits = response[b'data']
    if not hits:
        return None
    repophid = hits[0][b'phid']
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
419
419
420
420
# Matches a local tag like "D123" marking a node previously sent to
# Phabricator; group 1 is the numeric revision ID.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>" line in a commit message;
# named groups capture the full URL and the trailing numeric ID.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425
425
426
426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: a tag alone is not trusted; it must be
                        # confirmed against Phabricator below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is authoritative.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the node recorded in a diff's metadata (None if absent).
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Retag to nullid, which removes the stale local tag.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520
520
521
521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        ctx = repo[rev]
        drev = None
        # Prefer the "Differential Revision:" line in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drev = int(descmatch.group('id'))
        else:
            # Otherwise fall back to a local "D123" tag, if any.
            for tag in repo.nodetags(ctx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drev = int(tagmatch.group(1))
                    break
        mapping[rev] = drev
    return mapping
543
543
544
544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    repo = ctx.repo()
    buf = util.stringio()
    # Diff the changeset against its first parent and collect the raw chunks.
    chunks = patch.diffui(repo, ctx.p1().node(), ctx.node(), None, opts=diffopts)
    for piece, _unused_label in chunks:
        buf.write(piece)
    return buf.getvalue()
553
553
554
554
class DiffChangeType(object):
    """How a file changed within a diff.

    Presumably mirrors Phabricator's server-side DifferentialChangeType
    constants — the numeric values must stay in sync with the Conduit API.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564
564
565
565
class DiffFileType(object):
    """Content type of a changed file, as understood by the Conduit API.

    Presumably mirrors Phabricator's server-side constants; values must stay
    in sync with the server.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570
570
571
571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Hunk coordinates, as in a unified diff's "@@" header
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body (diff lines) as a single byte string
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585
585
586
586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths when this file is the source of a move/copy
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate each b'new:*' metadata entry under the b'old:*' key

        The b'new:' entries themselves are left in place.
        """
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the old side's unix file mode (git-style octal bytes)"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new side's unix file mode (git-style octal bytes)"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk and fold its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626
626
627
627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps currentPath -> serialized phabchange (see addchange)
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange on this diff, keyed by its currentPath"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654
654
655
655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Effectively unlimited context, so the hunks carry the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Only one file is matched, so a single (pfctx, fctx, header, hunks)
    # entry is expected from diffhunks
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the first line (the hunk's own "@@" range line); the offsets
        # and lengths above carry that information for Phabricator
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Count added/deleted lines for this hunk via the diffstat machinery
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684
684
685
685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # Ask the server which byte ranges it still needs for this file PHID
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip ranges the server already has (e.g. a resumed upload)
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711
711
712
712
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; aborts if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # allocate returned no PHID: send the whole file in one request
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID exists but content is incomplete: upload in chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748
748
749
749
750 def addoldbinary(pchange, fctx):
750 def addoldbinary(pchange, oldfctx, fctx):
751 """add the metadata for the previous version of a binary file to the
751 """add the metadata for the previous version of a binary file to the
752 phabchange for the new version
752 phabchange for the new version
753
754 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
755 version of the file, or None if the file is being removed.
753 """
756 """
754 oldfctx = fctx.p1()
757 if not fctx or fctx.cmp(oldfctx):
755 if fctx.cmp(oldfctx):
756 # Files differ, add the old one
758 # Files differ, add the old one
757 pchange.metadata[b'old:file:size'] = oldfctx.size()
759 pchange.metadata[b'old:file:size'] = oldfctx.size()
758 mimeguess, _enc = mimetypes.guess_type(
760 mimeguess, _enc = mimetypes.guess_type(
759 encoding.unifromlocal(oldfctx.path())
761 encoding.unifromlocal(oldfctx.path())
760 )
762 )
761 if mimeguess:
763 if mimeguess:
762 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
764 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
763 mimeguess
765 mimeguess
764 )
766 )
765 fphid = uploadfile(oldfctx)
767 fphid = uploadfile(oldfctx)
766 pchange.metadata[b'old:binary-phid'] = fphid
768 pchange.metadata[b'old:binary-phid'] = fphid
767 else:
769 else:
768 # If it's left as IMAGE/BINARY web UI might try to display it
770 # If it's left as IMAGE/BINARY web UI might try to display it
769 pchange.fileType = DiffFileType.TEXT
771 pchange.fileType = DiffFileType.TEXT
770 pchange.copynewmetadatatoold()
772 pchange.copynewmetadatatoold()
771
773
772
774
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    # Default to BINARY; refined to IMAGE below when the MIME type says so
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _encguess = mimetypes.guess_type(
        encoding.unifromlocal(fctx.path())
    )
    if guessed:
        guessed = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = guessed
        if guessed.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
785
787
786
788
# Copied from mercurial/patch.py
# Maps a filectx flag (b'l' symlink, b'x' executable, b'' regular file) to
# the git-style octal mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789
791
790
792
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Check both this version and (when it exists) the first parent's
        # version, since the diff covers both sides of the change.
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
806
808
807
809
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # The removed file's mode and content come from the first parent;
        # look the filectx up once instead of twice.
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820
822
821
823
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        # Only record file modes when they actually changed
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            # The previous version is the first parent's copy of the file
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
840
842
841
843
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source is gone in the new commit: this is a move
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Source was already moved elsewhere: it fans out to
                # multiple destinations
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                # For copies/moves, the old binary is the first parent's
                # copy of the rename source
                addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the synthesized source-side changes after all adds are recorded
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
905
907
906
908
def creatediff(ctx):
    """create a Differential Diff

    Returns the diff dict from the server; aborts on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
932
934
933
935
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    # First property: hg:meta, the commit-level metadata needed to
    # reconstruct the changeset exactly
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    # Second property: local:commits, keyed by node hash
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
970
972
971
973
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair; aborts if the server returns nothing.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs to decide whether the content changed
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1041
1043
1042
1044
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1058
1060
1059
1061
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line or [phabsend] confirm in the config both
    # trigger an interactive prompt before anything is sent.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the shared transaction list (reviewers/blockers) applied to every
    # revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite the changeset with the amended description,
                    # remapping parents that were themselves rewritten
                    # earlier in this loop.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1271
1273
1272
1274
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space is intentional: "hg export" emits "# Parent  <node>".
        (b'parent', b'Parent '),
    ]
)
1284
1286
1285
1287
def _confirmbeforesend(repo, revs, oldmap):
    """print a one-line summary per changeset and prompt before sending

    Returns True when the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Show the existing D-number when we would update, "NEW" otherwise.
        drevdesc = (
            ui.label(b'D%d' % drevid, b'phabricator.drev')
            if drevid
            else ui.label(_(b'NEW'), b'phabricator.drev')
        )

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns the selected index; 0 is "Yes".
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1313
1315
1314
1316
# Status names accepted as symbols in the drev query language; compared
# against _getstatusname(drev) (lowercased, spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1323
1325
1324
1326
1325 def _getstatusname(drev):
1327 def _getstatusname(drev):
1326 """get normalized status name from a Differential Revision"""
1328 """get normalized status name from a Differential Revision"""
1327 return drev[b'statusName'].replace(b' ', b'').lower()
1329 return drev[b'statusName'].replace(b' ', b'').lower()
1328
1330
1329
1331
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by parser.parser() in _parse below.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1344
1346
1345
1347
def _tokenize(text):
    """yield (token-type, value, pos) triples for the drev query language

    Runs of non-special bytes become ``symbol`` tokens, each special
    character is its own token (spaces are skipped), and a trailing
    ``end`` token is always emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Collect the longest run of non-special bytes starting at pos.
        run = []
        for ch in pycompat.iterbytestr(view[pos:]):
            if ch in special:
                break
            run.append(ch)
        symbol = b''.join(run)
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:
            # Special char at pos; spaces are consumed without a token.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1365
1367
1366
1368
def _parse(text):
    """parse a drev query spec into a parse tree

    Raises ParseError when the whole input cannot be consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1372
1374
1373
1375
1374 def _parsedrev(symbol):
1376 def _parsedrev(symbol):
1375 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1377 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1376 if symbol.startswith(b'D') and symbol[1:].isdigit():
1378 if symbol.startswith(b'D') and symbol[1:].isdigit():
1377 return int(symbol[1:])
1379 return int(symbol[1:])
1378 if symbol.isdigit():
1380 if symbol.isdigit():
1379 return int(symbol)
1381 return int(symbol)
1380
1382
1381
1383
1382 def _prefetchdrevs(tree):
1384 def _prefetchdrevs(tree):
1383 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1385 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1384 drevs = set()
1386 drevs = set()
1385 ancestordrevs = set()
1387 ancestordrevs = set()
1386 op = tree[0]
1388 op = tree[0]
1387 if op == b'symbol':
1389 if op == b'symbol':
1388 r = _parsedrev(tree[1])
1390 r = _parsedrev(tree[1])
1389 if r:
1391 if r:
1390 drevs.add(r)
1392 drevs.add(r)
1391 elif op == b'ancestors':
1393 elif op == b'ancestors':
1392 r, a = _prefetchdrevs(tree[1])
1394 r, a = _prefetchdrevs(tree[1])
1393 drevs.update(r)
1395 drevs.update(r)
1394 ancestordrevs.update(r)
1396 ancestordrevs.update(r)
1395 ancestordrevs.update(a)
1397 ancestordrevs.update(a)
1396 else:
1398 else:
1397 for t in tree[1:]:
1399 for t in tree[1:]:
1398 r, a = _prefetchdrevs(t)
1400 r, a = _prefetchdrevs(t)
1399 drevs.update(r)
1401 drevs.update(r)
1400 ancestordrevs.update(a)
1402 ancestordrevs.update(a)
1401 return drevs, ancestordrevs
1403 return drevs, ancestordrevs
1402
1404
1403
1405
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by PHID and by
        # integer id so either kind of lookup hits the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # getattr() requires a native str attribute name on Python 3;
            # passing the bytes op directly raises TypeError there, so
            # convert first.
            return getattr(operator, pycompat.sysstr(op))(
                walk(tree[1]), walk(tree[2])
            )
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1529
1531
1530
1532
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    pieces = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        pieces.append(b'Test Plan:\n%s' % testplan)
    pieces.append(b'Differential Revision: %s' % drev[b'uri'])
    # Blank sections (e.g. an empty summary) are dropped entirely so we
    # never emit consecutive separators.
    return b'\n\n'.join(p for p in pieces if p)
1544
1546
1545
1547
1546 def getdiffmeta(diff):
1548 def getdiffmeta(diff):
1547 """get commit metadata (date, node, user, p1) from a diff object
1549 """get commit metadata (date, node, user, p1) from a diff object
1548
1550
1549 The metadata could be "hg:meta", sent by phabsend, like:
1551 The metadata could be "hg:meta", sent by phabsend, like:
1550
1552
1551 "properties": {
1553 "properties": {
1552 "hg:meta": {
1554 "hg:meta": {
1553 "branch": "default",
1555 "branch": "default",
1554 "date": "1499571514 25200",
1556 "date": "1499571514 25200",
1555 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1557 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1556 "user": "Foo Bar <foo@example.com>",
1558 "user": "Foo Bar <foo@example.com>",
1557 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1559 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1558 }
1560 }
1559 }
1561 }
1560
1562
1561 Or converted from "local:commits", sent by "arc", like:
1563 Or converted from "local:commits", sent by "arc", like:
1562
1564
1563 "properties": {
1565 "properties": {
1564 "local:commits": {
1566 "local:commits": {
1565 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1567 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1566 "author": "Foo Bar",
1568 "author": "Foo Bar",
1567 "authorEmail": "foo@example.com"
1569 "authorEmail": "foo@example.com"
1568 "branch": "default",
1570 "branch": "default",
1569 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1571 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1570 "local": "1000",
1572 "local": "1000",
1571 "message": "...",
1573 "message": "...",
1572 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1574 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1573 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1575 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1574 "summary": "...",
1576 "summary": "...",
1575 "tag": "",
1577 "tag": "",
1576 "time": 1499546314,
1578 "time": 1499546314,
1577 }
1579 }
1578 }
1580 }
1579 }
1581 }
1580
1582
1581 Note: metadata extracted from "local:commits" will lose time zone
1583 Note: metadata extracted from "local:commits" will lose time zone
1582 information.
1584 information.
1583 """
1585 """
1584 props = diff.get(b'properties') or {}
1586 props = diff.get(b'properties') or {}
1585 meta = props.get(b'hg:meta')
1587 meta = props.get(b'hg:meta')
1586 if not meta:
1588 if not meta:
1587 if props.get(b'local:commits'):
1589 if props.get(b'local:commits'):
1588 commit = sorted(props[b'local:commits'].values())[0]
1590 commit = sorted(props[b'local:commits'].values())[0]
1589 meta = {}
1591 meta = {}
1590 if b'author' in commit and b'authorEmail' in commit:
1592 if b'author' in commit and b'authorEmail' in commit:
1591 meta[b'user'] = b'%s <%s>' % (
1593 meta[b'user'] = b'%s <%s>' % (
1592 commit[b'author'],
1594 commit[b'author'],
1593 commit[b'authorEmail'],
1595 commit[b'authorEmail'],
1594 )
1596 )
1595 if b'time' in commit:
1597 if b'time' in commit:
1596 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1598 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1597 if b'branch' in commit:
1599 if b'branch' in commit:
1598 meta[b'branch'] = commit[b'branch']
1600 meta[b'branch'] = commit[b'branch']
1599 node = commit.get(b'commit', commit.get(b'rev'))
1601 node = commit.get(b'commit', commit.get(b'rev'))
1600 if node:
1602 if node:
1601 meta[b'node'] = node
1603 meta[b'node'] = node
1602 if len(commit.get(b'parents', ())) >= 1:
1604 if len(commit.get(b'parents', ())) >= 1:
1603 meta[b'parent'] = commit[b'parents'][0]
1605 meta[b'parent'] = commit[b'parents'][0]
1604 else:
1606 else:
1605 meta = {}
1607 meta = {}
1606 if b'date' not in meta and b'dateCreated' in diff:
1608 if b'date' not in meta and b'dateCreated' in diff:
1607 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1609 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1608 if b'branch' not in meta and diff.get(b'branch'):
1610 if b'branch' not in meta and diff.get(b'branch'):
1609 meta[b'branch'] = diff[b'branch']
1611 meta[b'branch'] = diff[b'branch']
1610 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1612 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1611 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1613 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1612 return meta
1614 return meta
1613
1615
1614
1616
1615 def readpatch(ui, drevs, write):
1617 def readpatch(ui, drevs, write):
1616 """generate plain-text patch readable by 'hg import'
1618 """generate plain-text patch readable by 'hg import'
1617
1619
1618 write takes a list of (DREV, bytes), where DREV is the differential number
1620 write takes a list of (DREV, bytes), where DREV is the differential number
1619 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1621 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1620 to be imported. drevs is what "querydrev" returns, results of
1622 to be imported. drevs is what "querydrev" returns, results of
1621 "differential.query".
1623 "differential.query".
1622 """
1624 """
1623 # Prefetch hg:meta property for all diffs
1625 # Prefetch hg:meta property for all diffs
1624 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1626 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1625 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1627 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1626
1628
1627 patches = []
1629 patches = []
1628
1630
1629 # Generate patch for each drev
1631 # Generate patch for each drev
1630 for drev in drevs:
1632 for drev in drevs:
1631 ui.note(_(b'reading D%s\n') % drev[b'id'])
1633 ui.note(_(b'reading D%s\n') % drev[b'id'])
1632
1634
1633 diffid = max(int(v) for v in drev[b'diffs'])
1635 diffid = max(int(v) for v in drev[b'diffs'])
1634 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1636 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1635 desc = getdescfromdrev(drev)
1637 desc = getdescfromdrev(drev)
1636 header = b'# HG changeset patch\n'
1638 header = b'# HG changeset patch\n'
1637
1639
1638 # Try to preserve metadata from hg:meta property. Write hg patch
1640 # Try to preserve metadata from hg:meta property. Write hg patch
1639 # headers that can be read by the "import" command. See patchheadermap
1641 # headers that can be read by the "import" command. See patchheadermap
1640 # and extract in mercurial/patch.py for supported headers.
1642 # and extract in mercurial/patch.py for supported headers.
1641 meta = getdiffmeta(diffs[b'%d' % diffid])
1643 meta = getdiffmeta(diffs[b'%d' % diffid])
1642 for k in _metanamemap.keys():
1644 for k in _metanamemap.keys():
1643 if k in meta:
1645 if k in meta:
1644 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1646 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1645
1647
1646 content = b'%s%s\n%s' % (header, desc, body)
1648 content = b'%s%s\n%s' % (header, desc, body)
1647 patches.append((drev[b'id'], content))
1649 patches.append((drev[b'id'], content))
1648
1650
1649 # Write patches to the supplied callback
1651 # Write patches to the supplied callback
1650 write(patches)
1652 write(patches)
1651
1653
1652
1654
1653 @vcrcommand(
1655 @vcrcommand(
1654 b'phabread',
1656 b'phabread',
1655 [(b'', b'stack', False, _(b'read dependencies'))],
1657 [(b'', b'stack', False, _(b'read dependencies'))],
1656 _(b'DREVSPEC [OPTIONS]'),
1658 _(b'DREVSPEC [OPTIONS]'),
1657 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1659 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1658 optionalrepo=True,
1660 optionalrepo=True,
1659 )
1661 )
1660 def phabread(ui, repo, spec, **opts):
1662 def phabread(ui, repo, spec, **opts):
1661 """print patches from Phabricator suitable for importing
1663 """print patches from Phabricator suitable for importing
1662
1664
1663 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1665 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1664 the number ``123``. It could also have common operators like ``+``, ``-``,
1666 the number ``123``. It could also have common operators like ``+``, ``-``,
1665 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1667 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1666 select a stack.
1668 select a stack.
1667
1669
1668 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1670 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1669 could be used to filter patches by status. For performance reason, they
1671 could be used to filter patches by status. For performance reason, they
1670 only represent a subset of non-status selections and cannot be used alone.
1672 only represent a subset of non-status selections and cannot be used alone.
1671
1673
1672 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1674 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1673 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1675 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1674 stack up to D9.
1676 stack up to D9.
1675
1677
1676 If --stack is given, follow dependencies information and read all patches.
1678 If --stack is given, follow dependencies information and read all patches.
1677 It is equivalent to the ``:`` operator.
1679 It is equivalent to the ``:`` operator.
1678 """
1680 """
1679 opts = pycompat.byteskwargs(opts)
1681 opts = pycompat.byteskwargs(opts)
1680 if opts.get(b'stack'):
1682 if opts.get(b'stack'):
1681 spec = b':(%s)' % spec
1683 spec = b':(%s)' % spec
1682 drevs = querydrev(ui, spec)
1684 drevs = querydrev(ui, spec)
1683
1685
1684 def _write(patches):
1686 def _write(patches):
1685 for drev, content in patches:
1687 for drev, content in patches:
1686 ui.write(content)
1688 ui.write(content)
1687
1689
1688 readpatch(ui, drevs, _write)
1690 readpatch(ui, drevs, _write)
1689
1691
1690
1692
1691 @vcrcommand(
1693 @vcrcommand(
1692 b'phabupdate',
1694 b'phabupdate',
1693 [
1695 [
1694 (b'', b'accept', False, _(b'accept revisions')),
1696 (b'', b'accept', False, _(b'accept revisions')),
1695 (b'', b'reject', False, _(b'reject revisions')),
1697 (b'', b'reject', False, _(b'reject revisions')),
1696 (b'', b'abandon', False, _(b'abandon revisions')),
1698 (b'', b'abandon', False, _(b'abandon revisions')),
1697 (b'', b'reclaim', False, _(b'reclaim revisions')),
1699 (b'', b'reclaim', False, _(b'reclaim revisions')),
1698 (b'm', b'comment', b'', _(b'comment on the last revision')),
1700 (b'm', b'comment', b'', _(b'comment on the last revision')),
1699 ],
1701 ],
1700 _(b'DREVSPEC [OPTIONS]'),
1702 _(b'DREVSPEC [OPTIONS]'),
1701 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1703 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1702 optionalrepo=True,
1704 optionalrepo=True,
1703 )
1705 )
1704 def phabupdate(ui, repo, spec, **opts):
1706 def phabupdate(ui, repo, spec, **opts):
1705 """update Differential Revision in batch
1707 """update Differential Revision in batch
1706
1708
1707 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1709 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1708 """
1710 """
1709 opts = pycompat.byteskwargs(opts)
1711 opts = pycompat.byteskwargs(opts)
1710 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1712 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1711 if len(flags) > 1:
1713 if len(flags) > 1:
1712 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1714 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1713
1715
1714 actions = []
1716 actions = []
1715 for f in flags:
1717 for f in flags:
1716 actions.append({b'type': f, b'value': True})
1718 actions.append({b'type': f, b'value': True})
1717
1719
1718 drevs = querydrev(ui, spec)
1720 drevs = querydrev(ui, spec)
1719 for i, drev in enumerate(drevs):
1721 for i, drev in enumerate(drevs):
1720 if i + 1 == len(drevs) and opts.get(b'comment'):
1722 if i + 1 == len(drevs) and opts.get(b'comment'):
1721 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1723 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1722 if actions:
1724 if actions:
1723 params = {
1725 params = {
1724 b'objectIdentifier': drev[b'phid'],
1726 b'objectIdentifier': drev[b'phid'],
1725 b'transactions': actions,
1727 b'transactions': actions,
1726 }
1728 }
1727 callconduit(ui, b'differential.revision.edit', params)
1729 callconduit(ui, b'differential.revision.edit', params)
1728
1730
1729
1731
1730 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1732 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1731 def template_review(context, mapping):
1733 def template_review(context, mapping):
1732 """:phabreview: Object describing the review for this changeset.
1734 """:phabreview: Object describing the review for this changeset.
1733 Has attributes `url` and `id`.
1735 Has attributes `url` and `id`.
1734 """
1736 """
1735 ctx = context.resource(mapping, b'ctx')
1737 ctx = context.resource(mapping, b'ctx')
1736 m = _differentialrevisiondescre.search(ctx.description())
1738 m = _differentialrevisiondescre.search(ctx.description())
1737 if m:
1739 if m:
1738 return templateutil.hybriddict(
1740 return templateutil.hybriddict(
1739 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1741 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1740 )
1742 )
1741 else:
1743 else:
1742 tags = ctx.repo().nodetags(ctx.node())
1744 tags = ctx.repo().nodetags(ctx.node())
1743 for t in tags:
1745 for t in tags:
1744 if _differentialrevisiontagre.match(t):
1746 if _differentialrevisiontagre.match(t):
1745 url = ctx.repo().ui.config(b'phabricator', b'url')
1747 url = ctx.repo().ui.config(b'phabricator', b'url')
1746 if not url.endswith(b'/'):
1748 if not url.endswith(b'/'):
1747 url += b'/'
1749 url += b'/'
1748 url += t
1750 url += t
1749
1751
1750 return templateutil.hybriddict({b'url': url, b'id': t,})
1752 return templateutil.hybriddict({b'url': url, b'id': t,})
1751 return None
1753 return None
1752
1754
1753
1755
1754 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1756 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1755 def template_status(context, mapping):
1757 def template_status(context, mapping):
1756 """:phabstatus: String. Status of Phabricator differential.
1758 """:phabstatus: String. Status of Phabricator differential.
1757 """
1759 """
1758 ctx = context.resource(mapping, b'ctx')
1760 ctx = context.resource(mapping, b'ctx')
1759 repo = context.resource(mapping, b'repo')
1761 repo = context.resource(mapping, b'repo')
1760 ui = context.resource(mapping, b'ui')
1762 ui = context.resource(mapping, b'ui')
1761
1763
1762 rev = ctx.rev()
1764 rev = ctx.rev()
1763 try:
1765 try:
1764 drevid = getdrevmap(repo, [rev])[rev]
1766 drevid = getdrevmap(repo, [rev])[rev]
1765 except KeyError:
1767 except KeyError:
1766 return None
1768 return None
1767 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1769 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1768 for drev in drevs:
1770 for drev in drevs:
1769 if int(drev[b'id']) == drevid:
1771 if int(drev[b'id']) == drevid:
1770 return templateutil.hybriddict(
1772 return templateutil.hybriddict(
1771 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1773 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1772 )
1774 )
1773 return None
1775 return None
1774
1776
1775
1777
1776 @show.showview(b'phabstatus', csettopic=b'work')
1778 @show.showview(b'phabstatus', csettopic=b'work')
1777 def phabstatusshowview(ui, repo, displayer):
1779 def phabstatusshowview(ui, repo, displayer):
1778 """Phabricator differiential status"""
1780 """Phabricator differiential status"""
1779 revs = repo.revs('sort(_underway(), topo)')
1781 revs = repo.revs('sort(_underway(), topo)')
1780 drevmap = getdrevmap(repo, revs)
1782 drevmap = getdrevmap(repo, revs)
1781 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1783 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1782 for rev, drevid in pycompat.iteritems(drevmap):
1784 for rev, drevid in pycompat.iteritems(drevmap):
1783 if drevid is not None:
1785 if drevid is not None:
1784 drevids.add(drevid)
1786 drevids.add(drevid)
1785 revsbydrevid.setdefault(drevid, set([])).add(rev)
1787 revsbydrevid.setdefault(drevid, set([])).add(rev)
1786 else:
1788 else:
1787 unknownrevs.append(rev)
1789 unknownrevs.append(rev)
1788
1790
1789 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1791 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1790 drevsbyrev = {}
1792 drevsbyrev = {}
1791 for drev in drevs:
1793 for drev in drevs:
1792 for rev in revsbydrevid[int(drev[b'id'])]:
1794 for rev in revsbydrevid[int(drev[b'id'])]:
1793 drevsbyrev[rev] = drev
1795 drevsbyrev[rev] = drev
1794
1796
1795 def phabstatus(ctx):
1797 def phabstatus(ctx):
1796 drev = drevsbyrev[ctx.rev()]
1798 drev = drevsbyrev[ctx.rev()]
1797 status = ui.label(
1799 status = ui.label(
1798 b'%(statusName)s' % drev,
1800 b'%(statusName)s' % drev,
1799 b'phabricator.status.%s' % _getstatusname(drev),
1801 b'phabricator.status.%s' % _getstatusname(drev),
1800 )
1802 )
1801 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1803 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1802
1804
1803 revs -= smartset.baseset(unknownrevs)
1805 revs -= smartset.baseset(unknownrevs)
1804 revdag = graphmod.dagwalker(repo, revs)
1806 revdag = graphmod.dagwalker(repo, revs)
1805
1807
1806 ui.setconfig(b'experimental', b'graphshorten', True)
1808 ui.setconfig(b'experimental', b'graphshorten', True)
1807 displayer._exthook = phabstatus
1809 displayer._exthook = phabstatus
1808 nodelen = show.longestshortest(repo, revs)
1810 nodelen = show.longestshortest(repo, revs)
1809 logcmdutil.displaygraph(
1811 logcmdutil.displaygraph(
1810 ui,
1812 ui,
1811 repo,
1813 repo,
1812 revdag,
1814 revdag,
1813 displayer,
1815 displayer,
1814 graphmod.asciiedges,
1816 graphmod.asciiedges,
1815 props={b'nodelen': nodelen},
1817 props={b'nodelen': nodelen},
1816 )
1818 )
General Comments 0
You need to be logged in to leave comments. Login now