phabricator: add the uploadfile function...
Ian Moody
r43458:24e8aac7 default
@@ -1,1419 +1,1457 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that are not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import base64
import contextlib
import hashlib
import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.thirdparty import attr
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    exthelper,
    httpconnection as httpconnectionmod,
    match,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]


def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate


def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)


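A minimal sketch (illustrative only, not part of this changeset) of what the flattening above produces for a nested Conduit parameter dict, assuming the extension is importable as ``hgext.phabricator`` from a Mercurial install; bracketed keys are subsequently percent-encoded by urlencode:

    from hgext.phabricator import urlencodenested

    params = {b'constraints': {b'ids': [10, 11]}, b'attachments': {b'projects': True}}
    # roughly constraints[ids][0]=10&constraints[ids][1]=11&attachments[projects]=true,
    # with '[' and ']' percent-encoded on the wire
    print(urlencodenested(params))
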
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token


def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']


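A minimal sketch of a Conduit round-trip through the helper above (illustrative; assumes a ``ui`` configured with ``phabricator.url`` and an ``[auth]`` token, e.g. inside an extension command). ``user.whoami`` is a standard Conduit method that takes no parameters, and the result dict is keyed by bytes:

    whoami = callconduit(ui, b'user.whoami', {})
    ui.write(b'token belongs to %s\n' % whoami[b'userName'])
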
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))


def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid


_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)


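A small sketch (illustrative only) of how the second pattern recognises an existing association in a commit message and extracts the revision number:

    desc = b'fix widget\n\nDifferential Revision: https://phab.example.com/D1234'
    m = _differentialrevisiondescre.search(desc)
    assert m and int(m.group(r'id')) == 1234
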
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such a commit message line is not found, examines all precursors and
    their tags. Tags with a format like "D1234" are considered a match, and
    the node with that tag and the number after "D" (ex. 1234) will be
    returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result


def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()


class DiffChangeType(object):
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8


class DiffFileType(object):
    TEXT = 1
    IMAGE = 2
    BINARY = 3


@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required


@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(hunk)
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines


@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = change


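A rough sketch of how the three attrs classes above nest when building a diff payload, with made-up values (illustrative only, not from this changeset):

    hunk = phabhunk(
        oldOffset=1, oldLength=1, newOffset=1, newLength=2,
        corpus=b' context\n+added line\n', addLines=1, delLines=0,
    )
    change = phabchange(currentPath=b'foo.txt', oldPath=b'foo.txt')
    change.addhunk(hunk)
    pdiff = phabdiff()
    pdiff.addchange(change)
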
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )


def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()


def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid


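A rough usage sketch for the new function (illustrative; ``ctx`` is assumed to be a changectx containing a binary file): the allocate call decides between doing nothing, a one-shot upload, or a chunked upload, and the caller ends up with an opaque file PHID usable in later Conduit calls:

    fctx = ctx[b'images/logo.png']  # hypothetical binary file in the changeset
    fphid = uploadfile(fctx)        # file PHID, or an abort if the upload failed
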
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff


def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)


def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff


def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]


749 @vcrcommand(
787 @vcrcommand(
750 b'phabsend',
788 b'phabsend',
751 [
789 [
752 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
790 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
753 (b'', b'amend', True, _(b'update commit messages')),
791 (b'', b'amend', True, _(b'update commit messages')),
754 (b'', b'reviewer', [], _(b'specify reviewers')),
792 (b'', b'reviewer', [], _(b'specify reviewers')),
755 (b'', b'blocker', [], _(b'specify blocking reviewers')),
793 (b'', b'blocker', [], _(b'specify blocking reviewers')),
756 (
794 (
757 b'm',
795 b'm',
758 b'comment',
796 b'comment',
759 b'',
797 b'',
760 _(b'add a comment to Revisions with new/updated Diffs'),
798 _(b'add a comment to Revisions with new/updated Diffs'),
761 ),
799 ),
762 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
800 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
763 ],
801 ],
764 _(b'REV [OPTIONS]'),
802 _(b'REV [OPTIONS]'),
765 helpcategory=command.CATEGORY_IMPORT_EXPORT,
803 helpcategory=command.CATEGORY_IMPORT_EXPORT,
766 )
804 )
767 def phabsend(ui, repo, *revs, **opts):
805 def phabsend(ui, repo, *revs, **opts):
768 """upload changesets to Phabricator
806 """upload changesets to Phabricator
769
807
770 If there are multiple revisions specified, they will be send as a stack
808 If there are multiple revisions specified, they will be send as a stack
771 with a linear dependencies relationship using the order specified by the
809 with a linear dependencies relationship using the order specified by the
772 revset.
810 revset.
773
811
774 For the first time uploading changesets, local tags will be created to
812 For the first time uploading changesets, local tags will be created to
775 maintain the association. After the first time, phabsend will check
813 maintain the association. After the first time, phabsend will check
776 obsstore and tags information so it can figure out whether to update an
814 obsstore and tags information so it can figure out whether to update an
777 existing Differential Revision, or create a new one.
815 existing Differential Revision, or create a new one.
778
816
779 If --amend is set, update commit messages so they have the
817 If --amend is set, update commit messages so they have the
780 ``Differential Revision`` URL, remove related tags. This is similar to what
818 ``Differential Revision`` URL, remove related tags. This is similar to what
781 arcanist will do, and is more desired in author-push workflows. Otherwise,
819 arcanist will do, and is more desired in author-push workflows. Otherwise,
782 use local tags to record the ``Differential Revision`` association.
820 use local tags to record the ``Differential Revision`` association.
783
821
784 The --confirm option lets you confirm changesets before sending them. You
822 The --confirm option lets you confirm changesets before sending them. You
785 can also add following to your configuration file to make it default
823 can also add following to your configuration file to make it default
786 behaviour::
824 behaviour::
787
825
788 [phabsend]
826 [phabsend]
789 confirm = true
827 confirm = true
790
828
791 phabsend will check obsstore and the above association to decide whether to
829 phabsend will check obsstore and the above association to decide whether to
792 update an existing Differential Revision, or create a new one.
830 update an existing Differential Revision, or create a new one.
793 """
831 """
794 opts = pycompat.byteskwargs(opts)
832 opts = pycompat.byteskwargs(opts)
795 revs = list(revs) + opts.get(b'rev', [])
833 revs = list(revs) + opts.get(b'rev', [])
796 revs = scmutil.revrange(repo, revs)
834 revs = scmutil.revrange(repo, revs)
797
835
798 if not revs:
836 if not revs:
799 raise error.Abort(_(b'phabsend requires at least one changeset'))
837 raise error.Abort(_(b'phabsend requires at least one changeset'))
800 if opts.get(b'amend'):
838 if opts.get(b'amend'):
801 cmdutil.checkunfinished(repo)
839 cmdutil.checkunfinished(repo)
802
840
803 # {newnode: (oldnode, olddiff, olddrev}
841 # {newnode: (oldnode, olddiff, olddrev}
804 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
842 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
805
843
806 confirm = ui.configbool(b'phabsend', b'confirm')
844 confirm = ui.configbool(b'phabsend', b'confirm')
807 confirm |= bool(opts.get(b'confirm'))
845 confirm |= bool(opts.get(b'confirm'))
808 if confirm:
846 if confirm:
809 confirmed = _confirmbeforesend(repo, revs, oldmap)
847 confirmed = _confirmbeforesend(repo, revs, oldmap)
810 if not confirmed:
848 if not confirmed:
811 raise error.Abort(_(b'phabsend cancelled'))
849 raise error.Abort(_(b'phabsend cancelled'))
812
850
813 actions = []
851 actions = []
814 reviewers = opts.get(b'reviewer', [])
852 reviewers = opts.get(b'reviewer', [])
815 blockers = opts.get(b'blocker', [])
853 blockers = opts.get(b'blocker', [])
816 phids = []
854 phids = []
817 if reviewers:
855 if reviewers:
818 phids.extend(userphids(repo, reviewers))
856 phids.extend(userphids(repo, reviewers))
819 if blockers:
857 if blockers:
820 phids.extend(
858 phids.extend(
821 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
859 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
822 )
860 )
823 if phids:
861 if phids:
824 actions.append({b'type': b'reviewers.add', b'value': phids})
862 actions.append({b'type': b'reviewers.add', b'value': phids})
825
863
826 drevids = [] # [int]
864 drevids = [] # [int]
827 diffmap = {} # {newnode: diff}
865 diffmap = {} # {newnode: diff}
828
866
829 # Send patches one by one so we know their Differential Revision PHIDs and
867 # Send patches one by one so we know their Differential Revision PHIDs and
830 # can provide dependency relationship
868 # can provide dependency relationship
831 lastrevphid = None
869 lastrevphid = None
832 for rev in revs:
870 for rev in revs:
833 ui.debug(b'sending rev %d\n' % rev)
871 ui.debug(b'sending rev %d\n' % rev)
834 ctx = repo[rev]
872 ctx = repo[rev]
835
873
836 # Get Differential Revision ID
874 # Get Differential Revision ID
837 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
875 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
838 if oldnode != ctx.node() or opts.get(b'amend'):
876 if oldnode != ctx.node() or opts.get(b'amend'):
839 # Create or update Differential Revision
877 # Create or update Differential Revision
840 revision, diff = createdifferentialrevision(
878 revision, diff = createdifferentialrevision(
841 ctx,
879 ctx,
842 revid,
880 revid,
843 lastrevphid,
881 lastrevphid,
844 oldnode,
882 oldnode,
845 olddiff,
883 olddiff,
846 actions,
884 actions,
847 opts.get(b'comment'),
885 opts.get(b'comment'),
848 )
886 )
849 diffmap[ctx.node()] = diff
887 diffmap[ctx.node()] = diff
850 newrevid = int(revision[b'object'][b'id'])
888 newrevid = int(revision[b'object'][b'id'])
851 newrevphid = revision[b'object'][b'phid']
889 newrevphid = revision[b'object'][b'phid']
852 if revid:
890 if revid:
853 action = b'updated'
891 action = b'updated'
854 else:
892 else:
855 action = b'created'
893 action = b'created'
856
894
857 # Create a local tag to note the association, if commit message
895 # Create a local tag to note the association, if commit message
858 # does not have it already
896 # does not have it already
859 m = _differentialrevisiondescre.search(ctx.description())
897 m = _differentialrevisiondescre.search(ctx.description())
860 if not m or int(m.group(r'id')) != newrevid:
898 if not m or int(m.group(r'id')) != newrevid:
861 tagname = b'D%d' % newrevid
899 tagname = b'D%d' % newrevid
862 tags.tag(
900 tags.tag(
863 repo,
901 repo,
864 tagname,
902 tagname,
865 ctx.node(),
903 ctx.node(),
866 message=None,
904 message=None,
867 user=None,
905 user=None,
868 date=None,
906 date=None,
869 local=True,
907 local=True,
870 )
908 )
871 else:
909 else:
872 # Nothing changed. But still set "newrevphid" so the next revision
910 # Nothing changed. But still set "newrevphid" so the next revision
873 # could depend on this one and "newrevid" for the summary line.
911 # could depend on this one and "newrevid" for the summary line.
874 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
912 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
875 newrevid = revid
913 newrevid = revid
876 action = b'skipped'
914 action = b'skipped'
877
915
878 actiondesc = ui.label(
916 actiondesc = ui.label(
879 {
917 {
880 b'created': _(b'created'),
918 b'created': _(b'created'),
881 b'skipped': _(b'skipped'),
919 b'skipped': _(b'skipped'),
882 b'updated': _(b'updated'),
920 b'updated': _(b'updated'),
883 }[action],
921 }[action],
884 b'phabricator.action.%s' % action,
922 b'phabricator.action.%s' % action,
885 )
923 )
886 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
924 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
887 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
925 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
888 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
926 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
889 ui.write(
927 ui.write(
890 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
928 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
891 )
929 )
892 drevids.append(newrevid)
930 drevids.append(newrevid)
893 lastrevphid = newrevphid
931 lastrevphid = newrevphid
894
932
895 # Update commit messages and remove tags
933 # Update commit messages and remove tags
896 if opts.get(b'amend'):
934 if opts.get(b'amend'):
897 unfi = repo.unfiltered()
935 unfi = repo.unfiltered()
898 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
936 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
899 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
937 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
900 wnode = unfi[b'.'].node()
938 wnode = unfi[b'.'].node()
901 mapping = {} # {oldnode: [newnode]}
939 mapping = {} # {oldnode: [newnode]}
902 for i, rev in enumerate(revs):
940 for i, rev in enumerate(revs):
903 old = unfi[rev]
941 old = unfi[rev]
904 drevid = drevids[i]
942 drevid = drevids[i]
905 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
943 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
906 newdesc = getdescfromdrev(drev)
944 newdesc = getdescfromdrev(drev)
907 # Make sure the commit message contains "Differential Revision"
945 # Make sure the commit message contains "Differential Revision"
908 if old.description() != newdesc:
946 if old.description() != newdesc:
909 if old.phase() == phases.public:
947 if old.phase() == phases.public:
910 ui.warn(
948 ui.warn(
911 _(b"warning: not updating public commit %s\n")
949 _(b"warning: not updating public commit %s\n")
912 % scmutil.formatchangeid(old)
950 % scmutil.formatchangeid(old)
913 )
951 )
914 continue
952 continue
915 parents = [
953 parents = [
916 mapping.get(old.p1().node(), (old.p1(),))[0],
954 mapping.get(old.p1().node(), (old.p1(),))[0],
917 mapping.get(old.p2().node(), (old.p2(),))[0],
955 mapping.get(old.p2().node(), (old.p2(),))[0],
918 ]
956 ]
919 new = context.metadataonlyctx(
957 new = context.metadataonlyctx(
920 repo,
958 repo,
921 old,
959 old,
922 parents=parents,
960 parents=parents,
923 text=newdesc,
961 text=newdesc,
924 user=old.user(),
962 user=old.user(),
925 date=old.date(),
963 date=old.date(),
926 extra=old.extra(),
964 extra=old.extra(),
927 )
965 )
928
966
929 newnode = new.commit()
967 newnode = new.commit()
930
968
931 mapping[old.node()] = [newnode]
969 mapping[old.node()] = [newnode]
932 # Update diff property
970 # Update diff property
933 # If it fails, just warn and keep going; otherwise the DREV
971 # If it fails, just warn and keep going; otherwise the DREV
934 # associations will be lost
972 # associations will be lost
935 try:
973 try:
936 writediffproperties(unfi[newnode], diffmap[old.node()])
974 writediffproperties(unfi[newnode], diffmap[old.node()])
937 except util.urlerr.urlerror:
975 except util.urlerr.urlerror:
938 ui.warnnoi18n(
976 ui.warnnoi18n(
939 b'Failed to update metadata for D%s\n' % drevid
977 b'Failed to update metadata for D%s\n' % drevid
940 )
978 )
941 # Remove the local tag since it's no longer necessary
979 # Remove the local tag since it's no longer necessary
942 tagname = b'D%d' % drevid
980 tagname = b'D%d' % drevid
943 if tagname in repo.tags():
981 if tagname in repo.tags():
944 tags.tag(
982 tags.tag(
945 repo,
983 repo,
946 tagname,
984 tagname,
947 nullid,
985 nullid,
948 message=None,
986 message=None,
949 user=None,
987 user=None,
950 date=None,
988 date=None,
951 local=True,
989 local=True,
952 )
990 )
953 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
991 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
954 if wnode in mapping:
992 if wnode in mapping:
955 unfi.setparents(mapping[wnode][0])
993 unfi.setparents(mapping[wnode][0])
956
994
957
995
958 # Map from "hg:meta" keys to headers understood by "hg import". The order is
996 # Map from "hg:meta" keys to headers understood by "hg import". The order is
959 # consistent with "hg export" output.
997 # consistent with "hg export" output.
960 _metanamemap = util.sortdict(
998 _metanamemap = util.sortdict(
961 [
999 [
962 (b'user', b'User'),
1000 (b'user', b'User'),
963 (b'date', b'Date'),
1001 (b'date', b'Date'),
964 (b'branch', b'Branch'),
1002 (b'branch', b'Branch'),
965 (b'node', b'Node ID'),
1003 (b'node', b'Node ID'),
966 (b'parent', b'Parent '),
1004 (b'parent', b'Parent '),
967 ]
1005 ]
968 )
1006 )
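# Illustrative sketch (added here for clarity, not part of the original
# module): with hg:meta values such as
#   {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499571514 25200'}
# readpatch() below would emit headers in this order, e.g.
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200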
969
1007
970
1008
971 def _confirmbeforesend(repo, revs, oldmap):
1009 def _confirmbeforesend(repo, revs, oldmap):
972 url, token = readurltoken(repo.ui)
1010 url, token = readurltoken(repo.ui)
973 ui = repo.ui
1011 ui = repo.ui
974 for rev in revs:
1012 for rev in revs:
975 ctx = repo[rev]
1013 ctx = repo[rev]
976 desc = ctx.description().splitlines()[0]
1014 desc = ctx.description().splitlines()[0]
977 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1015 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
978 if drevid:
1016 if drevid:
979 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
1017 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
980 else:
1018 else:
981 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1019 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
982
1020
983 ui.write(
1021 ui.write(
984 _(b'%s - %s: %s\n')
1022 _(b'%s - %s: %s\n')
985 % (
1023 % (
986 drevdesc,
1024 drevdesc,
987 ui.label(bytes(ctx), b'phabricator.node'),
1025 ui.label(bytes(ctx), b'phabricator.node'),
988 ui.label(desc, b'phabricator.desc'),
1026 ui.label(desc, b'phabricator.desc'),
989 )
1027 )
990 )
1028 )
991
1029
992 if ui.promptchoice(
1030 if ui.promptchoice(
993 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1031 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
994 ):
1032 ):
995 return False
1033 return False
996
1034
997 return True
1035 return True
998
1036
999
1037
1000 _knownstatusnames = {
1038 _knownstatusnames = {
1001 b'accepted',
1039 b'accepted',
1002 b'needsreview',
1040 b'needsreview',
1003 b'needsrevision',
1041 b'needsrevision',
1004 b'closed',
1042 b'closed',
1005 b'abandoned',
1043 b'abandoned',
1006 }
1044 }
1007
1045
1008
1046
1009 def _getstatusname(drev):
1047 def _getstatusname(drev):
1010 """get normalized status name from a Differential Revision"""
1048 """get normalized status name from a Differential Revision"""
1011 return drev[b'statusName'].replace(b' ', b'').lower()
1049 return drev[b'statusName'].replace(b' ', b'').lower()
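# Example (illustrative): a drev whose statusName is "Needs Review" normalizes
# to b'needsreview', matching one of the names in _knownstatusnames above.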
1012
1050
1013
1051
1014 # Small language to specify differential revisions. Support symbols: (), :X,
1052 # Small language to specify differential revisions. Support symbols: (), :X,
1015 # +, and -.
1053 # +, and -.
1016
1054
1017 _elements = {
1055 _elements = {
1018 # token-type: binding-strength, primary, prefix, infix, suffix
1056 # token-type: binding-strength, primary, prefix, infix, suffix
1019 b'(': (12, None, (b'group', 1, b')'), None, None),
1057 b'(': (12, None, (b'group', 1, b')'), None, None),
1020 b':': (8, None, (b'ancestors', 8), None, None),
1058 b':': (8, None, (b'ancestors', 8), None, None),
1021 b'&': (5, None, None, (b'and_', 5), None),
1059 b'&': (5, None, None, (b'and_', 5), None),
1022 b'+': (4, None, None, (b'add', 4), None),
1060 b'+': (4, None, None, (b'add', 4), None),
1023 b'-': (4, None, None, (b'sub', 4), None),
1061 b'-': (4, None, None, (b'sub', 4), None),
1024 b')': (0, None, None, None, None),
1062 b')': (0, None, None, None, None),
1025 b'symbol': (0, b'symbol', None, None, None),
1063 b'symbol': (0, b'symbol', None, None, None),
1026 b'end': (0, None, None, None, None),
1064 b'end': (0, None, None, None, None),
1027 }
1065 }
1028
1066
1029
1067
1030 def _tokenize(text):
1068 def _tokenize(text):
1031 view = memoryview(text) # zero-copy slice
1069 view = memoryview(text) # zero-copy slice
1032 special = b'():+-& '
1070 special = b'():+-& '
1033 pos = 0
1071 pos = 0
1034 length = len(text)
1072 length = len(text)
1035 while pos < length:
1073 while pos < length:
1036 symbol = b''.join(
1074 symbol = b''.join(
1037 itertools.takewhile(
1075 itertools.takewhile(
1038 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1076 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1039 )
1077 )
1040 )
1078 )
1041 if symbol:
1079 if symbol:
1042 yield (b'symbol', symbol, pos)
1080 yield (b'symbol', symbol, pos)
1043 pos += len(symbol)
1081 pos += len(symbol)
1044 else: # special char, ignore space
1082 else: # special char, ignore space
1045 if text[pos] != b' ':
1083 if text[pos] != b' ':
1046 yield (text[pos], None, pos)
1084 yield (text[pos], None, pos)
1047 pos += 1
1085 pos += 1
1048 yield (b'end', None, pos)
1086 yield (b'end', None, pos)
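# Illustrative example (not in the original source): _tokenize(b'D4+D5') yields
# roughly
#   (b'symbol', b'D4', 0), (b'+', None, 2), (b'symbol', b'D5', 3), (b'end', None, 5)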
1049
1087
1050
1088
1051 def _parse(text):
1089 def _parse(text):
1052 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1090 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1053 if pos != len(text):
1091 if pos != len(text):
1054 raise error.ParseError(b'invalid token', pos)
1092 raise error.ParseError(b'invalid token', pos)
1055 return tree
1093 return tree
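# Illustrative sketch (assumed output shape, not taken from the original
# source): _parse(b':D4+D5') should yield a tree along the lines of
#   (b'add', (b'ancestors', (b'symbol', b'D4')), (b'symbol', b'D5'))
# which _prefetchdrevs() and walk() below then consume.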
1056
1094
1057
1095
1058 def _parsedrev(symbol):
1096 def _parsedrev(symbol):
1059 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1097 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1060 if symbol.startswith(b'D') and symbol[1:].isdigit():
1098 if symbol.startswith(b'D') and symbol[1:].isdigit():
1061 return int(symbol[1:])
1099 return int(symbol[1:])
1062 if symbol.isdigit():
1100 if symbol.isdigit():
1063 return int(symbol)
1101 return int(symbol)
1064
1102
1065
1103
1066 def _prefetchdrevs(tree):
1104 def _prefetchdrevs(tree):
1067 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1105 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1068 drevs = set()
1106 drevs = set()
1069 ancestordrevs = set()
1107 ancestordrevs = set()
1070 op = tree[0]
1108 op = tree[0]
1071 if op == b'symbol':
1109 if op == b'symbol':
1072 r = _parsedrev(tree[1])
1110 r = _parsedrev(tree[1])
1073 if r:
1111 if r:
1074 drevs.add(r)
1112 drevs.add(r)
1075 elif op == b'ancestors':
1113 elif op == b'ancestors':
1076 r, a = _prefetchdrevs(tree[1])
1114 r, a = _prefetchdrevs(tree[1])
1077 drevs.update(r)
1115 drevs.update(r)
1078 ancestordrevs.update(r)
1116 ancestordrevs.update(r)
1079 ancestordrevs.update(a)
1117 ancestordrevs.update(a)
1080 else:
1118 else:
1081 for t in tree[1:]:
1119 for t in tree[1:]:
1082 r, a = _prefetchdrevs(t)
1120 r, a = _prefetchdrevs(t)
1083 drevs.update(r)
1121 drevs.update(r)
1084 ancestordrevs.update(a)
1122 ancestordrevs.update(a)
1085 return drevs, ancestordrevs
1123 return drevs, ancestordrevs
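# Example (illustrative): for the spec b':D2+D3' this returns drevs == {2, 3}
# and ancestordrevs == {2}, telling querydrev that D2's ancestors need to be
# fetched as well.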
1086
1124
1087
1125
1088 def querydrev(repo, spec):
1126 def querydrev(repo, spec):
1089 """return a list of "Differential Revision" dicts
1127 """return a list of "Differential Revision" dicts
1090
1128
1091 spec is a string using a simple query language, see docstring in phabread
1129 spec is a string using a simple query language, see docstring in phabread
1092 for details.
1130 for details.
1093
1131
1094 A "Differential Revision dict" looks like:
1132 A "Differential Revision dict" looks like:
1095
1133
1096 {
1134 {
1097 "id": "2",
1135 "id": "2",
1098 "phid": "PHID-DREV-672qvysjcczopag46qty",
1136 "phid": "PHID-DREV-672qvysjcczopag46qty",
1099 "title": "example",
1137 "title": "example",
1100 "uri": "https://phab.example.com/D2",
1138 "uri": "https://phab.example.com/D2",
1101 "dateCreated": "1499181406",
1139 "dateCreated": "1499181406",
1102 "dateModified": "1499182103",
1140 "dateModified": "1499182103",
1103 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1141 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1104 "status": "0",
1142 "status": "0",
1105 "statusName": "Needs Review",
1143 "statusName": "Needs Review",
1106 "properties": [],
1144 "properties": [],
1107 "branch": null,
1145 "branch": null,
1108 "summary": "",
1146 "summary": "",
1109 "testPlan": "",
1147 "testPlan": "",
1110 "lineCount": "2",
1148 "lineCount": "2",
1111 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1149 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1112 "diffs": [
1150 "diffs": [
1113 "3",
1151 "3",
1114 "4",
1152 "4",
1115 ],
1153 ],
1116 "commits": [],
1154 "commits": [],
1117 "reviewers": [],
1155 "reviewers": [],
1118 "ccs": [],
1156 "ccs": [],
1119 "hashes": [],
1157 "hashes": [],
1120 "auxiliary": {
1158 "auxiliary": {
1121 "phabricator:projects": [],
1159 "phabricator:projects": [],
1122 "phabricator:depends-on": [
1160 "phabricator:depends-on": [
1123 "PHID-DREV-gbapp366kutjebt7agcd"
1161 "PHID-DREV-gbapp366kutjebt7agcd"
1124 ]
1162 ]
1125 },
1163 },
1126 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1164 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1127 "sourcePath": null
1165 "sourcePath": null
1128 }
1166 }
1129 """
1167 """
1130
1168
1131 def fetch(params):
1169 def fetch(params):
1132 """params -> single drev or None"""
1170 """params -> single drev or None"""
1133 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1171 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1134 if key in prefetched:
1172 if key in prefetched:
1135 return prefetched[key]
1173 return prefetched[key]
1136 drevs = callconduit(repo.ui, b'differential.query', params)
1174 drevs = callconduit(repo.ui, b'differential.query', params)
1137 # Fill prefetched with the result
1175 # Fill prefetched with the result
1138 for drev in drevs:
1176 for drev in drevs:
1139 prefetched[drev[b'phid']] = drev
1177 prefetched[drev[b'phid']] = drev
1140 prefetched[int(drev[b'id'])] = drev
1178 prefetched[int(drev[b'id'])] = drev
1141 if key not in prefetched:
1179 if key not in prefetched:
1142 raise error.Abort(
1180 raise error.Abort(
1143 _(b'cannot get Differential Revision %r') % params
1181 _(b'cannot get Differential Revision %r') % params
1144 )
1182 )
1145 return prefetched[key]
1183 return prefetched[key]
1146
1184
1147 def getstack(topdrevids):
1185 def getstack(topdrevids):
1148 """given a top, get a stack from the bottom, [id] -> [id]"""
1186 """given a top, get a stack from the bottom, [id] -> [id]"""
1149 visited = set()
1187 visited = set()
1150 result = []
1188 result = []
1151 queue = [{b'ids': [i]} for i in topdrevids]
1189 queue = [{b'ids': [i]} for i in topdrevids]
1152 while queue:
1190 while queue:
1153 params = queue.pop()
1191 params = queue.pop()
1154 drev = fetch(params)
1192 drev = fetch(params)
1155 if drev[b'id'] in visited:
1193 if drev[b'id'] in visited:
1156 continue
1194 continue
1157 visited.add(drev[b'id'])
1195 visited.add(drev[b'id'])
1158 result.append(int(drev[b'id']))
1196 result.append(int(drev[b'id']))
1159 auxiliary = drev.get(b'auxiliary', {})
1197 auxiliary = drev.get(b'auxiliary', {})
1160 depends = auxiliary.get(b'phabricator:depends-on', [])
1198 depends = auxiliary.get(b'phabricator:depends-on', [])
1161 for phid in depends:
1199 for phid in depends:
1162 queue.append({b'phids': [phid]})
1200 queue.append({b'phids': [phid]})
1163 result.reverse()
1201 result.reverse()
1164 return smartset.baseset(result)
1202 return smartset.baseset(result)
1165
1203
1166 # Initialize prefetch cache
1204 # Initialize prefetch cache
1167 prefetched = {} # {id or phid: drev}
1205 prefetched = {} # {id or phid: drev}
1168
1206
1169 tree = _parse(spec)
1207 tree = _parse(spec)
1170 drevs, ancestordrevs = _prefetchdrevs(tree)
1208 drevs, ancestordrevs = _prefetchdrevs(tree)
1171
1209
1172 # developer config: phabricator.batchsize
1210 # developer config: phabricator.batchsize
1173 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1211 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1174
1212
1175 # Prefetch Differential Revisions in batch
1213 # Prefetch Differential Revisions in batch
1176 tofetch = set(drevs)
1214 tofetch = set(drevs)
1177 for r in ancestordrevs:
1215 for r in ancestordrevs:
1178 tofetch.update(range(max(1, r - batchsize), r + 1))
1216 tofetch.update(range(max(1, r - batchsize), r + 1))
1179 if drevs:
1217 if drevs:
1180 fetch({b'ids': list(tofetch)})
1218 fetch({b'ids': list(tofetch)})
1181 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1219 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1182
1220
1183 # Walk through the tree, return smartsets
1221 # Walk through the tree, return smartsets
1184 def walk(tree):
1222 def walk(tree):
1185 op = tree[0]
1223 op = tree[0]
1186 if op == b'symbol':
1224 if op == b'symbol':
1187 drev = _parsedrev(tree[1])
1225 drev = _parsedrev(tree[1])
1188 if drev:
1226 if drev:
1189 return smartset.baseset([drev])
1227 return smartset.baseset([drev])
1190 elif tree[1] in _knownstatusnames:
1228 elif tree[1] in _knownstatusnames:
1191 drevs = [
1229 drevs = [
1192 r
1230 r
1193 for r in validids
1231 for r in validids
1194 if _getstatusname(prefetched[r]) == tree[1]
1232 if _getstatusname(prefetched[r]) == tree[1]
1195 ]
1233 ]
1196 return smartset.baseset(drevs)
1234 return smartset.baseset(drevs)
1197 else:
1235 else:
1198 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1236 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1199 elif op in {b'and_', b'add', b'sub'}:
1237 elif op in {b'and_', b'add', b'sub'}:
1200 assert len(tree) == 3
1238 assert len(tree) == 3
1201 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1239 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1202 elif op == b'group':
1240 elif op == b'group':
1203 return walk(tree[1])
1241 return walk(tree[1])
1204 elif op == b'ancestors':
1242 elif op == b'ancestors':
1205 return getstack(walk(tree[1]))
1243 return getstack(walk(tree[1]))
1206 else:
1244 else:
1207 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1245 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1208
1246
1209 return [prefetched[r] for r in walk(tree)]
1247 return [prefetched[r] for r in walk(tree)]
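# Hypothetical usage sketch (not from the original module):
#   drevs = querydrev(repo, b':D6+8-(2+D4)')
# would return the dicts for the stack up to D6 plus D8, minus D2 and D4, each
# shaped like the "Differential Revision dict" in the docstring above.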
1210
1248
1211
1249
1212 def getdescfromdrev(drev):
1250 def getdescfromdrev(drev):
1213 """get description (commit message) from "Differential Revision"
1251 """get description (commit message) from "Differential Revision"
1214
1252
1215 This is similar to the differential.getcommitmessage API, but we only care
1253 This is similar to the differential.getcommitmessage API, but we only care
1216 about a limited set of fields: title, summary, test plan, and URL.
1254 about a limited set of fields: title, summary, test plan, and URL.
1217 """
1255 """
1218 title = drev[b'title']
1256 title = drev[b'title']
1219 summary = drev[b'summary'].rstrip()
1257 summary = drev[b'summary'].rstrip()
1220 testplan = drev[b'testPlan'].rstrip()
1258 testplan = drev[b'testPlan'].rstrip()
1221 if testplan:
1259 if testplan:
1222 testplan = b'Test Plan:\n%s' % testplan
1260 testplan = b'Test Plan:\n%s' % testplan
1223 uri = b'Differential Revision: %s' % drev[b'uri']
1261 uri = b'Differential Revision: %s' % drev[b'uri']
1224 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1262 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
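# Illustrative example: a drev with title b'example', an empty summary, no test
# plan, and uri b'https://phab.example.com/D2' yields
#   b'example\n\nDifferential Revision: https://phab.example.com/D2'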
1225
1263
1226
1264
1227 def getdiffmeta(diff):
1265 def getdiffmeta(diff):
1228 """get commit metadata (date, node, user, p1) from a diff object
1266 """get commit metadata (date, node, user, p1) from a diff object
1229
1267
1230 The metadata could be "hg:meta", sent by phabsend, like:
1268 The metadata could be "hg:meta", sent by phabsend, like:
1231
1269
1232 "properties": {
1270 "properties": {
1233 "hg:meta": {
1271 "hg:meta": {
1234 "date": "1499571514 25200",
1272 "date": "1499571514 25200",
1235 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1273 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1236 "user": "Foo Bar <foo@example.com>",
1274 "user": "Foo Bar <foo@example.com>",
1237 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1275 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1238 }
1276 }
1239 }
1277 }
1240
1278
1241 Or converted from "local:commits", sent by "arc", like:
1279 Or converted from "local:commits", sent by "arc", like:
1242
1280
1243 "properties": {
1281 "properties": {
1244 "local:commits": {
1282 "local:commits": {
1245 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1283 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1246 "author": "Foo Bar",
1284 "author": "Foo Bar",
1247 "time": 1499546314,
1285 "time": 1499546314,
1248 "branch": "default",
1286 "branch": "default",
1249 "tag": "",
1287 "tag": "",
1250 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1288 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1251 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1289 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1252 "local": "1000",
1290 "local": "1000",
1253 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1291 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1254 "summary": "...",
1292 "summary": "...",
1255 "message": "...",
1293 "message": "...",
1256 "authorEmail": "foo@example.com"
1294 "authorEmail": "foo@example.com"
1257 }
1295 }
1258 }
1296 }
1259 }
1297 }
1260
1298
1261 Note: metadata extracted from "local:commits" will lose time zone
1299 Note: metadata extracted from "local:commits" will lose time zone
1262 information.
1300 information.
1263 """
1301 """
1264 props = diff.get(b'properties') or {}
1302 props = diff.get(b'properties') or {}
1265 meta = props.get(b'hg:meta')
1303 meta = props.get(b'hg:meta')
1266 if not meta:
1304 if not meta:
1267 if props.get(b'local:commits'):
1305 if props.get(b'local:commits'):
1268 commit = sorted(props[b'local:commits'].values())[0]
1306 commit = sorted(props[b'local:commits'].values())[0]
1269 meta = {}
1307 meta = {}
1270 if b'author' in commit and b'authorEmail' in commit:
1308 if b'author' in commit and b'authorEmail' in commit:
1271 meta[b'user'] = b'%s <%s>' % (
1309 meta[b'user'] = b'%s <%s>' % (
1272 commit[b'author'],
1310 commit[b'author'],
1273 commit[b'authorEmail'],
1311 commit[b'authorEmail'],
1274 )
1312 )
1275 if b'time' in commit:
1313 if b'time' in commit:
1276 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1314 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1277 if b'branch' in commit:
1315 if b'branch' in commit:
1278 meta[b'branch'] = commit[b'branch']
1316 meta[b'branch'] = commit[b'branch']
1279 node = commit.get(b'commit', commit.get(b'rev'))
1317 node = commit.get(b'commit', commit.get(b'rev'))
1280 if node:
1318 if node:
1281 meta[b'node'] = node
1319 meta[b'node'] = node
1282 if len(commit.get(b'parents', ())) >= 1:
1320 if len(commit.get(b'parents', ())) >= 1:
1283 meta[b'parent'] = commit[b'parents'][0]
1321 meta[b'parent'] = commit[b'parents'][0]
1284 else:
1322 else:
1285 meta = {}
1323 meta = {}
1286 if b'date' not in meta and b'dateCreated' in diff:
1324 if b'date' not in meta and b'dateCreated' in diff:
1287 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1325 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1288 if b'branch' not in meta and diff.get(b'branch'):
1326 if b'branch' not in meta and diff.get(b'branch'):
1289 meta[b'branch'] = diff[b'branch']
1327 meta[b'branch'] = diff[b'branch']
1290 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1328 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1291 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1329 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1292 return meta
1330 return meta
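# Illustrative example: for the "local:commits" data in the docstring above,
# this returns roughly
#   {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499546314 0',
#    b'branch': b'default', b'node': b'98c08acae292...', b'parent': b'6d0abad76b30...'}
# (note the zeroed time zone offset).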
1293
1331
1294
1332
1295 def readpatch(repo, drevs, write):
1333 def readpatch(repo, drevs, write):
1296 """generate plain-text patch readable by 'hg import'
1334 """generate plain-text patch readable by 'hg import'
1297
1335
1298 write is usually ui.write. drevs is what "querydrev" returns, results of
1336 write is usually ui.write. drevs is what "querydrev" returns, results of
1299 "differential.query".
1337 "differential.query".
1300 """
1338 """
1301 # Prefetch hg:meta property for all diffs
1339 # Prefetch hg:meta property for all diffs
1302 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1340 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1303 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1341 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1304
1342
1305 # Generate patch for each drev
1343 # Generate patch for each drev
1306 for drev in drevs:
1344 for drev in drevs:
1307 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1345 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1308
1346
1309 diffid = max(int(v) for v in drev[b'diffs'])
1347 diffid = max(int(v) for v in drev[b'diffs'])
1310 body = callconduit(
1348 body = callconduit(
1311 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1349 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1312 )
1350 )
1313 desc = getdescfromdrev(drev)
1351 desc = getdescfromdrev(drev)
1314 header = b'# HG changeset patch\n'
1352 header = b'# HG changeset patch\n'
1315
1353
1316 # Try to preserve metadata from hg:meta property. Write hg patch
1354 # Try to preserve metadata from hg:meta property. Write hg patch
1317 # headers that can be read by the "import" command. See patchheadermap
1355 # headers that can be read by the "import" command. See patchheadermap
1318 # and extract in mercurial/patch.py for supported headers.
1356 # and extract in mercurial/patch.py for supported headers.
1319 meta = getdiffmeta(diffs[b'%d' % diffid])
1357 meta = getdiffmeta(diffs[b'%d' % diffid])
1320 for k in _metanamemap.keys():
1358 for k in _metanamemap.keys():
1321 if k in meta:
1359 if k in meta:
1322 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1360 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1323
1361
1324 content = b'%s%s\n%s' % (header, desc, body)
1362 content = b'%s%s\n%s' % (header, desc, body)
1325 write(content)
1363 write(content)
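# Hypothetical usage sketch (mirrors phabread below, not new API):
#   readpatch(repo, querydrev(repo, b'D123'), ui.write)
# writes an "# HG changeset patch" document that `hg import` can apply.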
1326
1364
1327
1365
1328 @vcrcommand(
1366 @vcrcommand(
1329 b'phabread',
1367 b'phabread',
1330 [(b'', b'stack', False, _(b'read dependencies'))],
1368 [(b'', b'stack', False, _(b'read dependencies'))],
1331 _(b'DREVSPEC [OPTIONS]'),
1369 _(b'DREVSPEC [OPTIONS]'),
1332 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1370 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1333 )
1371 )
1334 def phabread(ui, repo, spec, **opts):
1372 def phabread(ui, repo, spec, **opts):
1335 """print patches from Phabricator suitable for importing
1373 """print patches from Phabricator suitable for importing
1336
1374
1337 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1375 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1338 the number ``123``. It could also have common operators like ``+``, ``-``,
1376 the number ``123``. It could also have common operators like ``+``, ``-``,
1339 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1377 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1340 select a stack.
1378 select a stack.
1341
1379
1342 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1380 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1343 could be used to filter patches by status. For performance reasons, they
1381 could be used to filter patches by status. For performance reasons, they
1344 only represent a subset of non-status selections and cannot be used alone.
1382 only represent a subset of non-status selections and cannot be used alone.
1345
1383
1346 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1384 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1347 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1385 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1348 stack up to D9.
1386 stack up to D9.
1349
1387
1350 If --stack is given, follow dependency information and read all patches.
1388 If --stack is given, follow dependency information and read all patches.
1351 It is equivalent to the ``:`` operator.
1389 It is equivalent to the ``:`` operator.
1352 """
1390 """
1353 opts = pycompat.byteskwargs(opts)
1391 opts = pycompat.byteskwargs(opts)
1354 if opts.get(b'stack'):
1392 if opts.get(b'stack'):
1355 spec = b':(%s)' % spec
1393 spec = b':(%s)' % spec
1356 drevs = querydrev(repo, spec)
1394 drevs = querydrev(repo, spec)
1357 readpatch(repo, drevs, ui.write)
1395 readpatch(repo, drevs, ui.write)
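# Example command lines (illustrative); the output is meant to be piped into
# "hg import":
#   $ hg phabread D123 | hg import -
#   $ hg phabread --stack D123 | hg import -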
1358
1396
1359
1397
1360 @vcrcommand(
1398 @vcrcommand(
1361 b'phabupdate',
1399 b'phabupdate',
1362 [
1400 [
1363 (b'', b'accept', False, _(b'accept revisions')),
1401 (b'', b'accept', False, _(b'accept revisions')),
1364 (b'', b'reject', False, _(b'reject revisions')),
1402 (b'', b'reject', False, _(b'reject revisions')),
1365 (b'', b'abandon', False, _(b'abandon revisions')),
1403 (b'', b'abandon', False, _(b'abandon revisions')),
1366 (b'', b'reclaim', False, _(b'reclaim revisions')),
1404 (b'', b'reclaim', False, _(b'reclaim revisions')),
1367 (b'm', b'comment', b'', _(b'comment on the last revision')),
1405 (b'm', b'comment', b'', _(b'comment on the last revision')),
1368 ],
1406 ],
1369 _(b'DREVSPEC [OPTIONS]'),
1407 _(b'DREVSPEC [OPTIONS]'),
1370 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1408 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1371 )
1409 )
1372 def phabupdate(ui, repo, spec, **opts):
1410 def phabupdate(ui, repo, spec, **opts):
1373 """update Differential Revision in batch
1411 """update Differential Revision in batch
1374
1412
1375 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1413 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1376 """
1414 """
1377 opts = pycompat.byteskwargs(opts)
1415 opts = pycompat.byteskwargs(opts)
1378 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1416 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1379 if len(flags) > 1:
1417 if len(flags) > 1:
1380 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1418 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1381
1419
1382 actions = []
1420 actions = []
1383 for f in flags:
1421 for f in flags:
1384 actions.append({b'type': f, b'value': b'true'})
1422 actions.append({b'type': f, b'value': b'true'})
1385
1423
1386 drevs = querydrev(repo, spec)
1424 drevs = querydrev(repo, spec)
1387 for i, drev in enumerate(drevs):
1425 for i, drev in enumerate(drevs):
1388 if i + 1 == len(drevs) and opts.get(b'comment'):
1426 if i + 1 == len(drevs) and opts.get(b'comment'):
1389 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1427 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1390 if actions:
1428 if actions:
1391 params = {
1429 params = {
1392 b'objectIdentifier': drev[b'phid'],
1430 b'objectIdentifier': drev[b'phid'],
1393 b'transactions': actions,
1431 b'transactions': actions,
1394 }
1432 }
1395 callconduit(ui, b'differential.revision.edit', params)
1433 callconduit(ui, b'differential.revision.edit', params)
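# Example command line (illustrative):
#   $ hg phabupdate --accept :D123 -m 'LGTM'
# accepts every revision in the stack up to D123 and leaves the comment on the
# last one.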
1396
1434
1397
1435
1398 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1436 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1399 def template_review(context, mapping):
1437 def template_review(context, mapping):
1400 """:phabreview: Object describing the review for this changeset.
1438 """:phabreview: Object describing the review for this changeset.
1401 Has attributes `url` and `id`.
1439 Has attributes `url` and `id`.
1402 """
1440 """
1403 ctx = context.resource(mapping, b'ctx')
1441 ctx = context.resource(mapping, b'ctx')
1404 m = _differentialrevisiondescre.search(ctx.description())
1442 m = _differentialrevisiondescre.search(ctx.description())
1405 if m:
1443 if m:
1406 return templateutil.hybriddict(
1444 return templateutil.hybriddict(
1407 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1445 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1408 )
1446 )
1409 else:
1447 else:
1410 tags = ctx.repo().nodetags(ctx.node())
1448 tags = ctx.repo().nodetags(ctx.node())
1411 for t in tags:
1449 for t in tags:
1412 if _differentialrevisiontagre.match(t):
1450 if _differentialrevisiontagre.match(t):
1413 url = ctx.repo().ui.config(b'phabricator', b'url')
1451 url = ctx.repo().ui.config(b'phabricator', b'url')
1414 if not url.endswith(b'/'):
1452 if not url.endswith(b'/'):
1415 url += b'/'
1453 url += b'/'
1416 url += t
1454 url += t
1417
1455
1418 return templateutil.hybriddict({b'url': url, b'id': t,})
1456 return templateutil.hybriddict({b'url': url, b'id': t,})
1419 return None
1457 return None
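# Example template usage (illustrative; exact member-access syntax may vary by
# Mercurial version):
#   $ hg log -r . -T '{phabreview.url}\n'
# prints the associated Differential Revision URL, if any.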