##// END OF EJS Templates
phabricator: also check parent fctx for binary where it is checked for UTF-8...
Matt Harbison -
r44914:4ce2330f default
parent child Browse files
Show More
@@ -1,1819 +1,1828 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Single exthelper instance that collects all commands, config items and
# template keywords registered in this module.
eh = exthelper.exthelper()

# Re-export the exthelper registration points under the names Mercurial's
# extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used when writing phabsend/phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra command-line flag appended by vcrcommand() to every command it wraps;
# lets tests record/replay HTTP traffic (see vcrcommand below).
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155
155
156
156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True when either the wrapped loader or the .arcconfig parse
    contributed configuration.
    """
    loaded = False
    params = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        decoded = pycompat.json_loads(raw)
        # json.loads only returns unicode strings; convert back to local bytes
        tolocal = (
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x
        )
        params = pycompat.rapply(tolocal, decoded)
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig in the working directory; nothing to load.
        pass

    # Map recognized .arcconfig keys onto [phabricator] config entries.
    overrides = util.sortdict()
    keymap = [
        (b"repository.callsign", (b"phabricator", b"callsign")),
        (b"phabricator.uri", (b"phabricator", b"url")),
    ]
    for src, dst in keymap:
        if src in params:
            overrides[dst] = params[src]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or loaded  # Load .hg/hgrc
193
193
194
194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Variant of @command that adds the --test-vcr flag.

    When --test-vcr is given, HTTP requests made by the command are either
    recorded to, or replayed from, the named cassette file via the ``vcr``
    library, so tests can run without talking to a live Phabricator.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: URI and method must match exactly; body
        # parameters are compared individually so that JSON payloads can be
        # compared structurally rather than byte-for-byte.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact the conduit API token so it never lands in a recorded
        # cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Strip cookies from recorded responses for the same reason.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # --test-vcr value; popped so fn never sees it.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's own imports do not play well with demandimport.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's connection classes, not
                        # httplib's, so urlmod traffic is intercepted.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command normally.
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271
271
272
272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # Python bools -> PHP form
            value = b'true' if value else b'false'
        # NOTE: exact type match on purpose -- subclasses of list/dict are
        # treated as leaf values, matching the original dispatch-table code.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
298
298
299
299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up the matching [auth] group for the configured URL.
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        group, auth = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328
328
329
329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``).
    Raises error.Abort when the server reports a Conduit error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy so the caller's dict is not mutated by the token injection below.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the form data through its stdin
        # (the @- argument) and read the response from its stdout.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: Mercurial's builtin HTTP machinery.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert the decoded JSON (unicode strings) back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373
373
374
374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sort_keys gives stable output for tests.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398
398
399
399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign to a PHID via Conduit.
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the answer on the ui for the rest of this session.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
419
419
420
420
# Local tag of the form "D123" marking a node already sent to Phabricator.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# "Differential Revision: <url>D123" line in a commit message; ``url`` and
# ``id`` named groups capture the full URL and the numeric revision ID.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425
425
426
426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # force=1 means the commit message itself names the drev, so the
    # precursor overlap check below is skipped for that node.
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Diff metadata stores the hex node it was created from; missing or
        # empty metadata yields None.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging with nullid removes the stale local tag.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520
520
521
521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # Prefer the "Differential Revision:" line in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            mapping[rev] = int(descmatch.group('id'))
            continue
        # Otherwise fall back to local tags of the form "D123".
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
543
543
544
544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    repo = ctx.repo()
    # Diff the changeset against its first parent; labels are irrelevant
    # here since we only want the raw patch text.
    for chunk, _label in patch.diffui(
        repo, ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        buf.write(chunk)
    return buf.getvalue()
553
553
554
554
class DiffChangeType(object):
    """Constants describing what happened to a file in a Differential diff.

    The numeric values are what the Conduit API expects; do not renumber.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564
564
565
565
class DiffFileType(object):
    """Constants describing how a file's content is represented in a diff.

    The numeric values are what the Conduit API expects; do not renumber.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570
570
571
571
@attr.s
class phabhunk(dict):
    """A single Differential hunk (one contiguous run of changed lines),
    owned by a Differential change.
    """

    # Field names must be camelCase for the Conduit API.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585
585
586
586
@attr.s
class phabchange(object):
    """One file's worth of a Differential diff.

    Owns the Differential hunks for that file and is owned by a
    Differential diff (phabdiff).
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key as ``old:``."""
        # Snapshot the items first: the dict grows while we insert.
        for key, value in list(self.metadata.items()):
            self.metadata[key.replace(b'new:', b'old:')] = value

    def addoldmode(self, value):
        """Record the file's previous unix mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into our totals."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626
626
627
627
@attr.s
class phabdiff(object):
    """A whole Differential diff, corresponding to a single commit.

    Owns one phabchange per touched file.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654
654
655
655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so each hunk carries the whole file's surroundings.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        # diffstat needs the header lines back in front of the hunk body
        statlines = list(header) + list(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684
684
685
685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the PHID returned by a prior ``file.allocate`` call; the
    server tells us which chunks it is still missing.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        # Hoist the file read out of the loop: the content is the same for
        # every chunk, and re-reading a multi-megabyte blob per 4MiB chunk
        # is pure waste.
        data = fctx.data()
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this chunk (e.g. resumed upload).
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711
711
712
712
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # Large file: the allocation reserved a PHID, send it in chunks.
            uploadchunks(fctx, fphid)
        else:
            # Small file: a single upload call is enough.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748
748
749
749
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged, so nothing to upload.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
773
773
774
774
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get a dedicated type so the web UI can render a preview.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
787
787
788
788
# Copied from mercurial/patch.py: map a file's flags ('l' symlink,
# 'x' executable, '' regular) to its git-style mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
791
791
792
792
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user the file will be treated as binary; side effect on
        # the ui is intentional.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    else:
        return False
806
806
807
807
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = ctx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary and non-UTF-8 files get no text hunks; Phabricator only
        # needs the delete record for them.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820
820
821
821
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # Treat the change as binary if either side is binary or not valid
        # UTF-8 (Phabricator requires such files to be marked binary).
        # Order matters: notutf8() writes a notice to the ui when it fires.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
841
846
842
847
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source vanished: this is a move. Claim the source out
                # of ``removed`` so addremoved() doesn't record it again.
                movedchanges[originalfname] = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A move source copied to additional destinations.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                origpchange = copiedchanges.setdefault(
                    originalfname,
                    phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    ),
                )
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Treat the change as binary if either side is binary or not valid
        # UTF-8; note notutf8() reports to the ui as a side effect.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
916 pdiff.addchange(movedchange)
908
917
909
918
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    response = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not response:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return response
935
944
936
945
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(localcommits),
        },
    )
973
982
974
983
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()

    # Only upload a fresh diff when the patch content actually changed
    # relative to the previously-sent node.
    if not oldnode:
        neednewdiff = True
    else:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions.extend(actions)

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in (b'title', b'summary', b'testPlan'):
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1044
1053
1045
1054
def userphids(ui, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # The API does not report unknown usernames as errors, so detect
    # misses ourselves by diffing what came back against what was asked.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1061
1070
1062
1071
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # The --confirm CLI flag ORs with the [phabsend] confirm config knob.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker names are resolved to PHIDs up front; blockers are
    # wrapped in Phabricator's "blocking(PHID)" syntax.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "D123 - created - 1:abcdef: title"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite on top of already-rewritten parents when they
                    # were amended earlier in this same loop.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1274
1283
1275
1284
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space matches "hg export" header alignment.
        (b'parent', b'Parent '),
    ]
)
1287
1296
1288
1297
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets about to be sent and prompt for confirmation

    Returns True when the user accepts, False when they decline.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        summary = ctx.description().splitlines()[0]

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(summary, b'phabricator.desc'),
            )
        )

    # promptchoice returns 0 for "Yes"; anything else means "No".
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1316
1325
1317
1326
# Normalized (lowercase, no spaces) Differential Revision status names that
# the DREV query language accepts as bare symbols; see _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1326
1335
1327
1336
1328 def _getstatusname(drev):
1337 def _getstatusname(drev):
1329 """get normalized status name from a Differential Revision"""
1338 """get normalized status name from a Differential Revision"""
1330 return drev[b'statusName'].replace(b' ', b'').lower()
1339 return drev[b'statusName'].replace(b' ', b'').lower()
1331
1340
1332
1341
1333 # Small language to specify differential revisions. Support symbols: (), :X,
1342 # Small language to specify differential revisions. Support symbols: (), :X,
1334 # +, and -.
1343 # +, and -.
1335
1344
# Grammar table for the small DREV spec language, consumed by
# mercurial's generic Pratt parser (parser.parser).
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1347
1356
1348
1357
def _tokenize(text):
    """Tokenize a DREV spec; yields (token-type, value, position) tuples.

    Symbols yield (b'symbol', bytes, pos); operators yield themselves with a
    None value; spaces are skipped; a final (b'end', None, pos) terminates.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume the longest run of non-special bytes as a symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1368
1377
1369
1378
def _parse(text):
    """Parse a DREV spec into an AST tuple; raise ParseError on leftovers."""
    specparser = parser.parser(_elements)
    tree, consumed = specparser.parse(_tokenize(text))
    if consumed == len(text):
        return tree
    # Anything short of full consumption means a stray token in the spec.
    raise error.ParseError(b'invalid token', consumed)
1375
1384
1376
1385
1377 def _parsedrev(symbol):
1386 def _parsedrev(symbol):
1378 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1387 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1379 if symbol.startswith(b'D') and symbol[1:].isdigit():
1388 if symbol.startswith(b'D') and symbol[1:].isdigit():
1380 return int(symbol[1:])
1389 return int(symbol[1:])
1381 if symbol.isdigit():
1390 if symbol.isdigit():
1382 return int(symbol)
1391 return int(symbol)
1383
1392
1384
1393
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    kind = tree[0]
    if kind == b'symbol':
        # Leaf: a concrete D-number contributes a single id.
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif kind == b'ancestors':
        # ":X" marks every id under it as needing ancestor expansion too.
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        drevs |= subdrevs
        ancestordrevs |= subdrevs
        ancestordrevs |= subancestors
    else:
        # Operators (and_/add/sub/group): union the children's results.
        for subtree in tree[1:]:
            subdrevs, subancestors = _prefetchdrevs(subtree)
            drevs |= subdrevs
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
1405
1414
1406
1415
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Cache key: the first requested id or phid; both index "prefetched".
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" edges, then
        # reversed so the result runs bottom (oldest) to top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Ancestors are likely to have nearby (smaller) ids; over-fetch a
        # window of "batchsize" ids below each one to warm the cache.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbol: filter the prefetched valid ids.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Map the AST operator name straight onto the operator module
            # (and_/add/sub), applied to the two child smartsets.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1532
1541
1533
1542
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        plan = b'Test Plan:\n%s' % plan
    sections.append(plan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped so the message has no stray blank blocks.
    return b'\n\n'.join(part for part in sections if part)
1547
1556
1548
1557
1549 def getdiffmeta(diff):
1558 def getdiffmeta(diff):
1550 """get commit metadata (date, node, user, p1) from a diff object
1559 """get commit metadata (date, node, user, p1) from a diff object
1551
1560
1552 The metadata could be "hg:meta", sent by phabsend, like:
1561 The metadata could be "hg:meta", sent by phabsend, like:
1553
1562
1554 "properties": {
1563 "properties": {
1555 "hg:meta": {
1564 "hg:meta": {
1556 "branch": "default",
1565 "branch": "default",
1557 "date": "1499571514 25200",
1566 "date": "1499571514 25200",
1558 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1567 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1559 "user": "Foo Bar <foo@example.com>",
1568 "user": "Foo Bar <foo@example.com>",
1560 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1569 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1561 }
1570 }
1562 }
1571 }
1563
1572
1564 Or converted from "local:commits", sent by "arc", like:
1573 Or converted from "local:commits", sent by "arc", like:
1565
1574
1566 "properties": {
1575 "properties": {
1567 "local:commits": {
1576 "local:commits": {
1568 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1577 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1569 "author": "Foo Bar",
1578 "author": "Foo Bar",
1570 "authorEmail": "foo@example.com"
1579 "authorEmail": "foo@example.com"
1571 "branch": "default",
1580 "branch": "default",
1572 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1581 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1573 "local": "1000",
1582 "local": "1000",
1574 "message": "...",
1583 "message": "...",
1575 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1584 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1576 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1585 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1577 "summary": "...",
1586 "summary": "...",
1578 "tag": "",
1587 "tag": "",
1579 "time": 1499546314,
1588 "time": 1499546314,
1580 }
1589 }
1581 }
1590 }
1582 }
1591 }
1583
1592
1584 Note: metadata extracted from "local:commits" will lose time zone
1593 Note: metadata extracted from "local:commits" will lose time zone
1585 information.
1594 information.
1586 """
1595 """
1587 props = diff.get(b'properties') or {}
1596 props = diff.get(b'properties') or {}
1588 meta = props.get(b'hg:meta')
1597 meta = props.get(b'hg:meta')
1589 if not meta:
1598 if not meta:
1590 if props.get(b'local:commits'):
1599 if props.get(b'local:commits'):
1591 commit = sorted(props[b'local:commits'].values())[0]
1600 commit = sorted(props[b'local:commits'].values())[0]
1592 meta = {}
1601 meta = {}
1593 if b'author' in commit and b'authorEmail' in commit:
1602 if b'author' in commit and b'authorEmail' in commit:
1594 meta[b'user'] = b'%s <%s>' % (
1603 meta[b'user'] = b'%s <%s>' % (
1595 commit[b'author'],
1604 commit[b'author'],
1596 commit[b'authorEmail'],
1605 commit[b'authorEmail'],
1597 )
1606 )
1598 if b'time' in commit:
1607 if b'time' in commit:
1599 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1608 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1600 if b'branch' in commit:
1609 if b'branch' in commit:
1601 meta[b'branch'] = commit[b'branch']
1610 meta[b'branch'] = commit[b'branch']
1602 node = commit.get(b'commit', commit.get(b'rev'))
1611 node = commit.get(b'commit', commit.get(b'rev'))
1603 if node:
1612 if node:
1604 meta[b'node'] = node
1613 meta[b'node'] = node
1605 if len(commit.get(b'parents', ())) >= 1:
1614 if len(commit.get(b'parents', ())) >= 1:
1606 meta[b'parent'] = commit[b'parents'][0]
1615 meta[b'parent'] = commit[b'parents'][0]
1607 else:
1616 else:
1608 meta = {}
1617 meta = {}
1609 if b'date' not in meta and b'dateCreated' in diff:
1618 if b'date' not in meta and b'dateCreated' in diff:
1610 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1619 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1611 if b'branch' not in meta and diff.get(b'branch'):
1620 if b'branch' not in meta and diff.get(b'branch'):
1612 meta[b'branch'] = diff[b'branch']
1621 meta[b'branch'] = diff[b'branch']
1613 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1622 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1614 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1623 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1615 return meta
1624 return meta
1616
1625
1617
1626
1618 def readpatch(ui, drevs, write):
1627 def readpatch(ui, drevs, write):
1619 """generate plain-text patch readable by 'hg import'
1628 """generate plain-text patch readable by 'hg import'
1620
1629
1621 write takes a list of (DREV, bytes), where DREV is the differential number
1630 write takes a list of (DREV, bytes), where DREV is the differential number
1622 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1631 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1623 to be imported. drevs is what "querydrev" returns, results of
1632 to be imported. drevs is what "querydrev" returns, results of
1624 "differential.query".
1633 "differential.query".
1625 """
1634 """
1626 # Prefetch hg:meta property for all diffs
1635 # Prefetch hg:meta property for all diffs
1627 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1636 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1628 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1637 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1629
1638
1630 patches = []
1639 patches = []
1631
1640
1632 # Generate patch for each drev
1641 # Generate patch for each drev
1633 for drev in drevs:
1642 for drev in drevs:
1634 ui.note(_(b'reading D%s\n') % drev[b'id'])
1643 ui.note(_(b'reading D%s\n') % drev[b'id'])
1635
1644
1636 diffid = max(int(v) for v in drev[b'diffs'])
1645 diffid = max(int(v) for v in drev[b'diffs'])
1637 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1646 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1638 desc = getdescfromdrev(drev)
1647 desc = getdescfromdrev(drev)
1639 header = b'# HG changeset patch\n'
1648 header = b'# HG changeset patch\n'
1640
1649
1641 # Try to preserve metadata from hg:meta property. Write hg patch
1650 # Try to preserve metadata from hg:meta property. Write hg patch
1642 # headers that can be read by the "import" command. See patchheadermap
1651 # headers that can be read by the "import" command. See patchheadermap
1643 # and extract in mercurial/patch.py for supported headers.
1652 # and extract in mercurial/patch.py for supported headers.
1644 meta = getdiffmeta(diffs[b'%d' % diffid])
1653 meta = getdiffmeta(diffs[b'%d' % diffid])
1645 for k in _metanamemap.keys():
1654 for k in _metanamemap.keys():
1646 if k in meta:
1655 if k in meta:
1647 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1656 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1648
1657
1649 content = b'%s%s\n%s' % (header, desc, body)
1658 content = b'%s%s\n%s' % (header, desc, body)
1650 patches.append((drev[b'id'], content))
1659 patches.append((drev[b'id'], content))
1651
1660
1652 # Write patches to the supplied callback
1661 # Write patches to the supplied callback
1653 write(patches)
1662 write(patches)
1654
1663
1655
1664
1656 @vcrcommand(
1665 @vcrcommand(
1657 b'phabread',
1666 b'phabread',
1658 [(b'', b'stack', False, _(b'read dependencies'))],
1667 [(b'', b'stack', False, _(b'read dependencies'))],
1659 _(b'DREVSPEC [OPTIONS]'),
1668 _(b'DREVSPEC [OPTIONS]'),
1660 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1669 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1661 optionalrepo=True,
1670 optionalrepo=True,
1662 )
1671 )
1663 def phabread(ui, repo, spec, **opts):
1672 def phabread(ui, repo, spec, **opts):
1664 """print patches from Phabricator suitable for importing
1673 """print patches from Phabricator suitable for importing
1665
1674
1666 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1675 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1667 the number ``123``. It could also have common operators like ``+``, ``-``,
1676 the number ``123``. It could also have common operators like ``+``, ``-``,
1668 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1677 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1669 select a stack.
1678 select a stack.
1670
1679
1671 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1680 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1672 could be used to filter patches by status. For performance reason, they
1681 could be used to filter patches by status. For performance reason, they
1673 only represent a subset of non-status selections and cannot be used alone.
1682 only represent a subset of non-status selections and cannot be used alone.
1674
1683
1675 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1684 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1676 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1685 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1677 stack up to D9.
1686 stack up to D9.
1678
1687
1679 If --stack is given, follow dependencies information and read all patches.
1688 If --stack is given, follow dependencies information and read all patches.
1680 It is equivalent to the ``:`` operator.
1689 It is equivalent to the ``:`` operator.
1681 """
1690 """
1682 opts = pycompat.byteskwargs(opts)
1691 opts = pycompat.byteskwargs(opts)
1683 if opts.get(b'stack'):
1692 if opts.get(b'stack'):
1684 spec = b':(%s)' % spec
1693 spec = b':(%s)' % spec
1685 drevs = querydrev(ui, spec)
1694 drevs = querydrev(ui, spec)
1686
1695
1687 def _write(patches):
1696 def _write(patches):
1688 for drev, content in patches:
1697 for drev, content in patches:
1689 ui.write(content)
1698 ui.write(content)
1690
1699
1691 readpatch(ui, drevs, _write)
1700 readpatch(ui, drevs, _write)
1692
1701
1693
1702
1694 @vcrcommand(
1703 @vcrcommand(
1695 b'phabupdate',
1704 b'phabupdate',
1696 [
1705 [
1697 (b'', b'accept', False, _(b'accept revisions')),
1706 (b'', b'accept', False, _(b'accept revisions')),
1698 (b'', b'reject', False, _(b'reject revisions')),
1707 (b'', b'reject', False, _(b'reject revisions')),
1699 (b'', b'abandon', False, _(b'abandon revisions')),
1708 (b'', b'abandon', False, _(b'abandon revisions')),
1700 (b'', b'reclaim', False, _(b'reclaim revisions')),
1709 (b'', b'reclaim', False, _(b'reclaim revisions')),
1701 (b'm', b'comment', b'', _(b'comment on the last revision')),
1710 (b'm', b'comment', b'', _(b'comment on the last revision')),
1702 ],
1711 ],
1703 _(b'DREVSPEC [OPTIONS]'),
1712 _(b'DREVSPEC [OPTIONS]'),
1704 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1713 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1705 optionalrepo=True,
1714 optionalrepo=True,
1706 )
1715 )
1707 def phabupdate(ui, repo, spec, **opts):
1716 def phabupdate(ui, repo, spec, **opts):
1708 """update Differential Revision in batch
1717 """update Differential Revision in batch
1709
1718
1710 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1719 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1711 """
1720 """
1712 opts = pycompat.byteskwargs(opts)
1721 opts = pycompat.byteskwargs(opts)
1713 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1722 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1714 if len(flags) > 1:
1723 if len(flags) > 1:
1715 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1724 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1716
1725
1717 actions = []
1726 actions = []
1718 for f in flags:
1727 for f in flags:
1719 actions.append({b'type': f, b'value': True})
1728 actions.append({b'type': f, b'value': True})
1720
1729
1721 drevs = querydrev(ui, spec)
1730 drevs = querydrev(ui, spec)
1722 for i, drev in enumerate(drevs):
1731 for i, drev in enumerate(drevs):
1723 if i + 1 == len(drevs) and opts.get(b'comment'):
1732 if i + 1 == len(drevs) and opts.get(b'comment'):
1724 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1733 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1725 if actions:
1734 if actions:
1726 params = {
1735 params = {
1727 b'objectIdentifier': drev[b'phid'],
1736 b'objectIdentifier': drev[b'phid'],
1728 b'transactions': actions,
1737 b'transactions': actions,
1729 }
1738 }
1730 callconduit(ui, b'differential.revision.edit', params)
1739 callconduit(ui, b'differential.revision.edit', params)
1731
1740
1732
1741
1733 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1742 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1734 def template_review(context, mapping):
1743 def template_review(context, mapping):
1735 """:phabreview: Object describing the review for this changeset.
1744 """:phabreview: Object describing the review for this changeset.
1736 Has attributes `url` and `id`.
1745 Has attributes `url` and `id`.
1737 """
1746 """
1738 ctx = context.resource(mapping, b'ctx')
1747 ctx = context.resource(mapping, b'ctx')
1739 m = _differentialrevisiondescre.search(ctx.description())
1748 m = _differentialrevisiondescre.search(ctx.description())
1740 if m:
1749 if m:
1741 return templateutil.hybriddict(
1750 return templateutil.hybriddict(
1742 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1751 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1743 )
1752 )
1744 else:
1753 else:
1745 tags = ctx.repo().nodetags(ctx.node())
1754 tags = ctx.repo().nodetags(ctx.node())
1746 for t in tags:
1755 for t in tags:
1747 if _differentialrevisiontagre.match(t):
1756 if _differentialrevisiontagre.match(t):
1748 url = ctx.repo().ui.config(b'phabricator', b'url')
1757 url = ctx.repo().ui.config(b'phabricator', b'url')
1749 if not url.endswith(b'/'):
1758 if not url.endswith(b'/'):
1750 url += b'/'
1759 url += b'/'
1751 url += t
1760 url += t
1752
1761
1753 return templateutil.hybriddict({b'url': url, b'id': t,})
1762 return templateutil.hybriddict({b'url': url, b'id': t,})
1754 return None
1763 return None
1755
1764
1756
1765
1757 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1766 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1758 def template_status(context, mapping):
1767 def template_status(context, mapping):
1759 """:phabstatus: String. Status of Phabricator differential.
1768 """:phabstatus: String. Status of Phabricator differential.
1760 """
1769 """
1761 ctx = context.resource(mapping, b'ctx')
1770 ctx = context.resource(mapping, b'ctx')
1762 repo = context.resource(mapping, b'repo')
1771 repo = context.resource(mapping, b'repo')
1763 ui = context.resource(mapping, b'ui')
1772 ui = context.resource(mapping, b'ui')
1764
1773
1765 rev = ctx.rev()
1774 rev = ctx.rev()
1766 try:
1775 try:
1767 drevid = getdrevmap(repo, [rev])[rev]
1776 drevid = getdrevmap(repo, [rev])[rev]
1768 except KeyError:
1777 except KeyError:
1769 return None
1778 return None
1770 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1779 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1771 for drev in drevs:
1780 for drev in drevs:
1772 if int(drev[b'id']) == drevid:
1781 if int(drev[b'id']) == drevid:
1773 return templateutil.hybriddict(
1782 return templateutil.hybriddict(
1774 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1783 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1775 )
1784 )
1776 return None
1785 return None
1777
1786
1778
1787
1779 @show.showview(b'phabstatus', csettopic=b'work')
1788 @show.showview(b'phabstatus', csettopic=b'work')
1780 def phabstatusshowview(ui, repo, displayer):
1789 def phabstatusshowview(ui, repo, displayer):
1781 """Phabricator differiential status"""
1790 """Phabricator differiential status"""
1782 revs = repo.revs('sort(_underway(), topo)')
1791 revs = repo.revs('sort(_underway(), topo)')
1783 drevmap = getdrevmap(repo, revs)
1792 drevmap = getdrevmap(repo, revs)
1784 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1793 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1785 for rev, drevid in pycompat.iteritems(drevmap):
1794 for rev, drevid in pycompat.iteritems(drevmap):
1786 if drevid is not None:
1795 if drevid is not None:
1787 drevids.add(drevid)
1796 drevids.add(drevid)
1788 revsbydrevid.setdefault(drevid, set([])).add(rev)
1797 revsbydrevid.setdefault(drevid, set([])).add(rev)
1789 else:
1798 else:
1790 unknownrevs.append(rev)
1799 unknownrevs.append(rev)
1791
1800
1792 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1801 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1793 drevsbyrev = {}
1802 drevsbyrev = {}
1794 for drev in drevs:
1803 for drev in drevs:
1795 for rev in revsbydrevid[int(drev[b'id'])]:
1804 for rev in revsbydrevid[int(drev[b'id'])]:
1796 drevsbyrev[rev] = drev
1805 drevsbyrev[rev] = drev
1797
1806
1798 def phabstatus(ctx):
1807 def phabstatus(ctx):
1799 drev = drevsbyrev[ctx.rev()]
1808 drev = drevsbyrev[ctx.rev()]
1800 status = ui.label(
1809 status = ui.label(
1801 b'%(statusName)s' % drev,
1810 b'%(statusName)s' % drev,
1802 b'phabricator.status.%s' % _getstatusname(drev),
1811 b'phabricator.status.%s' % _getstatusname(drev),
1803 )
1812 )
1804 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1813 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1805
1814
1806 revs -= smartset.baseset(unknownrevs)
1815 revs -= smartset.baseset(unknownrevs)
1807 revdag = graphmod.dagwalker(repo, revs)
1816 revdag = graphmod.dagwalker(repo, revs)
1808
1817
1809 ui.setconfig(b'experimental', b'graphshorten', True)
1818 ui.setconfig(b'experimental', b'graphshorten', True)
1810 displayer._exthook = phabstatus
1819 displayer._exthook = phabstatus
1811 nodelen = show.longestshortest(repo, revs)
1820 nodelen = show.longestshortest(repo, revs)
1812 logcmdutil.displaygraph(
1821 logcmdutil.displaygraph(
1813 ui,
1822 ui,
1814 repo,
1823 repo,
1815 revdag,
1824 revdag,
1816 displayer,
1825 displayer,
1817 graphmod.asciiedges,
1826 graphmod.asciiedges,
1818 props={b'nodelen': nodelen},
1827 props={b'nodelen': nodelen},
1819 )
1828 )
General Comments 0
You need to be logged in to leave comments. Login now