phabricator: ensure that the return of urlopener.open() is closed...
Matt Harbison
r41111:9d35ae3d default
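The change below wraps the return of ``urlopener.open()`` in ``contextlib.closing()`` so the Conduit HTTP response is always closed after its body is read. A minimal sketch of the same pattern using the standard library directly (the opener, request, and URL here are illustrative stand-ins, not the extension's actual objects):

    import contextlib
    import urllib.request

    opener = urllib.request.build_opener()
    request = urllib.request.Request('https://phab.example.com/api/conduit.ping')
    with contextlib.closing(opener.open(request)) as rsp:
        body = rsp.read()  # the response object is closed when the block exits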
@@ -1,992 +1,994 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import itertools
45 import itertools
45 import json
46 import json
46 import operator
47 import operator
47 import re
48 import re
48
49
49 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
50 from mercurial.i18n import _
51 from mercurial.i18n import _
51 from mercurial import (
52 from mercurial import (
52 cmdutil,
53 cmdutil,
53 context,
54 context,
54 encoding,
55 encoding,
55 error,
56 error,
56 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
57 mdiff,
58 mdiff,
58 obsutil,
59 obsutil,
59 parser,
60 parser,
60 patch,
61 patch,
61 registrar,
62 registrar,
62 scmutil,
63 scmutil,
63 smartset,
64 smartset,
64 tags,
65 tags,
65 templateutil,
66 templateutil,
66 url as urlmod,
67 url as urlmod,
67 util,
68 util,
68 )
69 )
69 from mercurial.utils import (
70 from mercurial.utils import (
70 procutil,
71 procutil,
71 stringutil,
72 stringutil,
72 )
73 )
73
74
74 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
75 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
75 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
76 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
76 # be specifying the version(s) of Mercurial they are tested with, or
77 # be specifying the version(s) of Mercurial they are tested with, or
77 # leave the attribute unspecified.
78 # leave the attribute unspecified.
78 testedwith = 'ships-with-hg-core'
79 testedwith = 'ships-with-hg-core'
79
80
80 cmdtable = {}
81 cmdtable = {}
81 command = registrar.command(cmdtable)
82 command = registrar.command(cmdtable)
82
83
83 configtable = {}
84 configtable = {}
84 configitem = registrar.configitem(configtable)
85 configitem = registrar.configitem(configtable)
85
86
86 # developer config: phabricator.batchsize
87 # developer config: phabricator.batchsize
87 configitem(b'phabricator', b'batchsize',
88 configitem(b'phabricator', b'batchsize',
88 default=12,
89 default=12,
89 )
90 )
90 configitem(b'phabricator', b'callsign',
91 configitem(b'phabricator', b'callsign',
91 default=None,
92 default=None,
92 )
93 )
93 configitem(b'phabricator', b'curlcmd',
94 configitem(b'phabricator', b'curlcmd',
94 default=None,
95 default=None,
95 )
96 )
96 # developer config: phabricator.repophid
97 # developer config: phabricator.repophid
97 configitem(b'phabricator', b'repophid',
98 configitem(b'phabricator', b'repophid',
98 default=None,
99 default=None,
99 )
100 )
100 configitem(b'phabricator', b'url',
101 configitem(b'phabricator', b'url',
101 default=None,
102 default=None,
102 )
103 )
103 configitem(b'phabsend', b'confirm',
104 configitem(b'phabsend', b'confirm',
104 default=False,
105 default=False,
105 )
106 )
106
107
107 colortable = {
108 colortable = {
108 b'phabricator.action.created': b'green',
109 b'phabricator.action.created': b'green',
109 b'phabricator.action.skipped': b'magenta',
110 b'phabricator.action.skipped': b'magenta',
110 b'phabricator.action.updated': b'magenta',
111 b'phabricator.action.updated': b'magenta',
111 b'phabricator.desc': b'',
112 b'phabricator.desc': b'',
112 b'phabricator.drev': b'bold',
113 b'phabricator.drev': b'bold',
113 b'phabricator.node': b'',
114 b'phabricator.node': b'',
114 }
115 }
115
116
116 _VCR_FLAGS = [
117 _VCR_FLAGS = [
117 (b'', b'test-vcr', b'',
118 (b'', b'test-vcr', b'',
118 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
119 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
119 b', otherwise will mock all http requests using the specified vcr file.'
120 b', otherwise will mock all http requests using the specified vcr file.'
120 b' (ADVANCED)'
121 b' (ADVANCED)'
121 )),
122 )),
122 ]
123 ]
123
124
124 def vcrcommand(name, flags, spec, helpcategory=None):
125 def vcrcommand(name, flags, spec, helpcategory=None):
125 fullflags = flags + _VCR_FLAGS
126 fullflags = flags + _VCR_FLAGS
126 def decorate(fn):
127 def decorate(fn):
127 def inner(*args, **kwargs):
128 def inner(*args, **kwargs):
128 cassette = kwargs.pop(r'test_vcr', None)
129 cassette = kwargs.pop(r'test_vcr', None)
129 if cassette:
130 if cassette:
130 import hgdemandimport
131 import hgdemandimport
131 with hgdemandimport.deactivated():
132 with hgdemandimport.deactivated():
132 import vcr as vcrmod
133 import vcr as vcrmod
133 import vcr.stubs as stubs
134 import vcr.stubs as stubs
134 vcr = vcrmod.VCR(
135 vcr = vcrmod.VCR(
135 serializer=r'json',
136 serializer=r'json',
136 custom_patches=[
137 custom_patches=[
137 (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
138 (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
138 (urlmod, 'httpsconnection',
139 (urlmod, 'httpsconnection',
139 stubs.VCRHTTPSConnection),
140 stubs.VCRHTTPSConnection),
140 ])
141 ])
141 with vcr.use_cassette(cassette):
142 with vcr.use_cassette(cassette):
142 return fn(*args, **kwargs)
143 return fn(*args, **kwargs)
143 return fn(*args, **kwargs)
144 return fn(*args, **kwargs)
144 inner.__name__ = fn.__name__
145 inner.__name__ = fn.__name__
145 inner.__doc__ = fn.__doc__
146 inner.__doc__ = fn.__doc__
146 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
147 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
147 return decorate
148 return decorate
148
149
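The ``--test-vcr`` flag added by ``vcrcommand`` records Conduit HTTP traffic to a cassette file when that file does not yet exist, and replays it instead of talking to the server when it does. A hypothetical invocation against a ``vcrcommand``-wrapped command such as ``phabsend`` (the cassette path is only an example):

    $ hg phabsend -r . --test-vcr phabsend-record.json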
149 def urlencodenested(params):
150 def urlencodenested(params):
150 """like urlencode, but works with nested parameters.
151 """like urlencode, but works with nested parameters.
151
152
152 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
153 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
153 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
154 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
154 urlencode. Note: the encoding is consistent with PHP's http_build_query.
155 urlencode. Note: the encoding is consistent with PHP's http_build_query.
155 """
156 """
156 flatparams = util.sortdict()
157 flatparams = util.sortdict()
157 def process(prefix, obj):
158 def process(prefix, obj):
158 if isinstance(obj, bool):
159 if isinstance(obj, bool):
159 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
160 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
160 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
161 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
161 if items is None:
162 if items is None:
162 flatparams[prefix] = obj
163 flatparams[prefix] = obj
163 else:
164 else:
164 for k, v in items(obj):
165 for k, v in items(obj):
165 if prefix:
166 if prefix:
166 process(b'%s[%s]' % (prefix, k), v)
167 process(b'%s[%s]' % (prefix, k), v)
167 else:
168 else:
168 process(k, v)
169 process(k, v)
169 process(b'', params)
170 process(b'', params)
170 return util.urlreq.urlencode(flatparams)
171 return util.urlreq.urlencode(flatparams)
171
172
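To make the flattening concrete, a standalone sketch of the encoding described in the docstring above (plain ``urllib`` is used here instead of Mercurial's ``util.urlreq`` wrapper):

    from urllib.parse import urlencode

    # {'a': ['b', 'c'], 'd': {'e': 'f'}} flattens to the dict below
    flat = {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}
    print(urlencode(flat))
    # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f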
172 def readurltoken(repo):
173 def readurltoken(repo):
173 """return conduit url, token and make sure they exist
174 """return conduit url, token and make sure they exist
174
175
175 Currently read from [auth] config section. In the future, it might
176 Currently read from [auth] config section. In the future, it might
176 make sense to read from .arcconfig and .arcrc as well.
177 make sense to read from .arcconfig and .arcrc as well.
177 """
178 """
178 url = repo.ui.config(b'phabricator', b'url')
179 url = repo.ui.config(b'phabricator', b'url')
179 if not url:
180 if not url:
180 raise error.Abort(_(b'config %s.%s is required')
181 raise error.Abort(_(b'config %s.%s is required')
181 % (b'phabricator', b'url'))
182 % (b'phabricator', b'url'))
182
183
183 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
184 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
184 token = None
185 token = None
185
186
186 if res:
187 if res:
187 group, auth = res
188 group, auth = res
188
189
189 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
190 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
190
191
191 token = auth.get(b'phabtoken')
192 token = auth.get(b'phabtoken')
192
193
193 if not token:
194 if not token:
194 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
195 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
195 % (url,))
196 % (url,))
196
197
197 return url, token
198 return url, token
198
199
199 def callconduit(repo, name, params):
200 def callconduit(repo, name, params):
200 """call Conduit API, params is a dict. return json.loads result, or None"""
201 """call Conduit API, params is a dict. return json.loads result, or None"""
201 host, token = readurltoken(repo)
202 host, token = readurltoken(repo)
202 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
203 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
203 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
204 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
204 params = params.copy()
205 params = params.copy()
205 params[b'api.token'] = token
206 params[b'api.token'] = token
206 data = urlencodenested(params)
207 data = urlencodenested(params)
207 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
208 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
208 if curlcmd:
209 if curlcmd:
209 sin, sout = procutil.popen2(b'%s -d @- %s'
210 sin, sout = procutil.popen2(b'%s -d @- %s'
210 % (curlcmd, procutil.shellquote(url)))
211 % (curlcmd, procutil.shellquote(url)))
211 sin.write(data)
212 sin.write(data)
212 sin.close()
213 sin.close()
213 body = sout.read()
214 body = sout.read()
214 else:
215 else:
215 urlopener = urlmod.opener(repo.ui, authinfo)
216 urlopener = urlmod.opener(repo.ui, authinfo)
216 request = util.urlreq.request(url, data=data)
217 request = util.urlreq.request(url, data=data)
217 body = urlopener.open(request).read()
218 with contextlib.closing(urlopener.open(request)) as rsp:
219 body = rsp.read()
218 repo.ui.debug(b'Conduit Response: %s\n' % body)
220 repo.ui.debug(b'Conduit Response: %s\n' % body)
219 parsed = json.loads(body)
221 parsed = json.loads(body)
220 if parsed.get(r'error_code'):
222 if parsed.get(r'error_code'):
221 msg = (_(b'Conduit Error (%s): %s')
223 msg = (_(b'Conduit Error (%s): %s')
222 % (parsed[r'error_code'], parsed[r'error_info']))
224 % (parsed[r'error_code'], parsed[r'error_info']))
223 raise error.Abort(msg)
225 raise error.Abort(msg)
224 return parsed[r'result']
226 return parsed[r'result']
225
227
226 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
228 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
227 def debugcallconduit(ui, repo, name):
229 def debugcallconduit(ui, repo, name):
228 """call Conduit API
230 """call Conduit API
229
231
230 Call parameters are read from stdin as a JSON blob. Result will be written
232 Call parameters are read from stdin as a JSON blob. Result will be written
231 to stdout as a JSON blob.
233 to stdout as a JSON blob.
232 """
234 """
233 params = json.loads(ui.fin.read())
235 params = json.loads(ui.fin.read())
234 result = callconduit(repo, name, params)
236 result = callconduit(repo, name, params)
235 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
237 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
236 ui.write(b'%s\n' % s)
238 ui.write(b'%s\n' % s)
237
239
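A hypothetical ``debugcallconduit`` session, passing the JSON parameter blob on stdin (the method and constraint values are examples only):

    $ echo '{"constraints": {"callsigns": ["FOO"]}}' \
          | hg debugcallconduit diffusion.repository.search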
238 def getrepophid(repo):
240 def getrepophid(repo):
239 """given callsign, return repository PHID or None"""
241 """given callsign, return repository PHID or None"""
240 # developer config: phabricator.repophid
242 # developer config: phabricator.repophid
241 repophid = repo.ui.config(b'phabricator', b'repophid')
243 repophid = repo.ui.config(b'phabricator', b'repophid')
242 if repophid:
244 if repophid:
243 return repophid
245 return repophid
244 callsign = repo.ui.config(b'phabricator', b'callsign')
246 callsign = repo.ui.config(b'phabricator', b'callsign')
245 if not callsign:
247 if not callsign:
246 return None
248 return None
247 query = callconduit(repo, b'diffusion.repository.search',
249 query = callconduit(repo, b'diffusion.repository.search',
248 {b'constraints': {b'callsigns': [callsign]}})
250 {b'constraints': {b'callsigns': [callsign]}})
249 if len(query[r'data']) == 0:
251 if len(query[r'data']) == 0:
250 return None
252 return None
251 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
253 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
252 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
254 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
253 return repophid
255 return repophid
254
256
255 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
257 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
256 _differentialrevisiondescre = re.compile(
258 _differentialrevisiondescre = re.compile(
257 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
259 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
258
260
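As a quick illustration of what the two patterns above capture (example values, relying on the definitions just above):

    # Tag form: b'D1234' matches, and group(1) is b'1234'
    assert _differentialrevisiontagre.match(b'D1234').group(1) == b'1234'
    # Commit message form: the URL and the trailing ID are captured separately
    m = _differentialrevisiondescre.search(
        b'Differential Revision: https://phab.example.com/D1234')
    assert m.group('id') == b'1234'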
259 def getoldnodedrevmap(repo, nodelist):
261 def getoldnodedrevmap(repo, nodelist):
260 """find previous nodes that has been sent to Phabricator
262 """find previous nodes that has been sent to Phabricator
261
263
262 return {node: (oldnode, Differential diff, Differential Revision ID)}
264 return {node: (oldnode, Differential diff, Differential Revision ID)}
263 for node in nodelist with known previous sent versions, or associated
265 for node in nodelist with known previous sent versions, or associated
264 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
266 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
265 be ``None``.
267 be ``None``.
266
268
267 Examines commit messages like "Differential Revision:" to get the
269 Examines commit messages like "Differential Revision:" to get the
268 association information.
270 association information.
269
271
270 If such commit message line is not found, examines all precursors and their
272 If such commit message line is not found, examines all precursors and their
271 tags. Tags with format like "D1234" are considered a match and the node
273 tags. Tags with format like "D1234" are considered a match and the node
272 with that tag, and the number after "D" (ex. 1234) will be returned.
274 with that tag, and the number after "D" (ex. 1234) will be returned.
273
275
274 The ``old node``, if not None, is guaranteed to be the last diff of
276 The ``old node``, if not None, is guaranteed to be the last diff of
275 corresponding Differential Revision, and exist in the repo.
277 corresponding Differential Revision, and exist in the repo.
276 """
278 """
277 url, token = readurltoken(repo)
279 url, token = readurltoken(repo)
278 unfi = repo.unfiltered()
280 unfi = repo.unfiltered()
279 nodemap = unfi.changelog.nodemap
281 nodemap = unfi.changelog.nodemap
280
282
281 result = {} # {node: (oldnode?, lastdiff?, drev)}
283 result = {} # {node: (oldnode?, lastdiff?, drev)}
282 toconfirm = {} # {node: (force, {precnode}, drev)}
284 toconfirm = {} # {node: (force, {precnode}, drev)}
283 for node in nodelist:
285 for node in nodelist:
284 ctx = unfi[node]
286 ctx = unfi[node]
285 # For tags like "D123", put them into "toconfirm" to verify later
287 # For tags like "D123", put them into "toconfirm" to verify later
286 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
288 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
287 for n in precnodes:
289 for n in precnodes:
288 if n in nodemap:
290 if n in nodemap:
289 for tag in unfi.nodetags(n):
291 for tag in unfi.nodetags(n):
290 m = _differentialrevisiontagre.match(tag)
292 m = _differentialrevisiontagre.match(tag)
291 if m:
293 if m:
292 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
294 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
293 continue
295 continue
294
296
295 # Check commit message
297 # Check commit message
296 m = _differentialrevisiondescre.search(ctx.description())
298 m = _differentialrevisiondescre.search(ctx.description())
297 if m:
299 if m:
298 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
300 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
299
301
300 # Double check if tags are genuine by collecting all old nodes from
302 # Double check if tags are genuine by collecting all old nodes from
301 # Phabricator, and expect precursors overlap with it.
303 # Phabricator, and expect precursors overlap with it.
302 if toconfirm:
304 if toconfirm:
303 drevs = [drev for force, precs, drev in toconfirm.values()]
305 drevs = [drev for force, precs, drev in toconfirm.values()]
304 alldiffs = callconduit(unfi, b'differential.querydiffs',
306 alldiffs = callconduit(unfi, b'differential.querydiffs',
305 {b'revisionIDs': drevs})
307 {b'revisionIDs': drevs})
306 getnode = lambda d: bin(encoding.unitolocal(
308 getnode = lambda d: bin(encoding.unitolocal(
307 getdiffmeta(d).get(r'node', b''))) or None
309 getdiffmeta(d).get(r'node', b''))) or None
308 for newnode, (force, precset, drev) in toconfirm.items():
310 for newnode, (force, precset, drev) in toconfirm.items():
309 diffs = [d for d in alldiffs.values()
311 diffs = [d for d in alldiffs.values()
310 if int(d[r'revisionID']) == drev]
312 if int(d[r'revisionID']) == drev]
311
313
312 # "precursors" as known by Phabricator
314 # "precursors" as known by Phabricator
313 phprecset = set(getnode(d) for d in diffs)
315 phprecset = set(getnode(d) for d in diffs)
314
316
315 # Ignore if precursors (Phabricator and local repo) do not overlap,
317 # Ignore if precursors (Phabricator and local repo) do not overlap,
316 # and force is not set (when commit message says nothing)
318 # and force is not set (when commit message says nothing)
317 if not force and not bool(phprecset & precset):
319 if not force and not bool(phprecset & precset):
318 tagname = b'D%d' % drev
320 tagname = b'D%d' % drev
319 tags.tag(repo, tagname, nullid, message=None, user=None,
321 tags.tag(repo, tagname, nullid, message=None, user=None,
320 date=None, local=True)
322 date=None, local=True)
321 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
323 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
322 b'Differential history\n') % drev)
324 b'Differential history\n') % drev)
323 continue
325 continue
324
326
325 # Find the last node using Phabricator metadata, and make sure it
327 # Find the last node using Phabricator metadata, and make sure it
326 # exists in the repo
328 # exists in the repo
327 oldnode = lastdiff = None
329 oldnode = lastdiff = None
328 if diffs:
330 if diffs:
329 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
331 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
330 oldnode = getnode(lastdiff)
332 oldnode = getnode(lastdiff)
331 if oldnode and oldnode not in nodemap:
333 if oldnode and oldnode not in nodemap:
332 oldnode = None
334 oldnode = None
333
335
334 result[newnode] = (oldnode, lastdiff, drev)
336 result[newnode] = (oldnode, lastdiff, drev)
335
337
336 return result
338 return result
337
339
338 def getdiff(ctx, diffopts):
340 def getdiff(ctx, diffopts):
339 """plain-text diff without header (user, commit message, etc)"""
341 """plain-text diff without header (user, commit message, etc)"""
340 output = util.stringio()
342 output = util.stringio()
341 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
343 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
342 None, opts=diffopts):
344 None, opts=diffopts):
343 output.write(chunk)
345 output.write(chunk)
344 return output.getvalue()
346 return output.getvalue()
345
347
346 def creatediff(ctx):
348 def creatediff(ctx):
347 """create a Differential Diff"""
349 """create a Differential Diff"""
348 repo = ctx.repo()
350 repo = ctx.repo()
349 repophid = getrepophid(repo)
351 repophid = getrepophid(repo)
350 # Create a "Differential Diff" via "differential.createrawdiff" API
352 # Create a "Differential Diff" via "differential.createrawdiff" API
351 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
353 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
352 if repophid:
354 if repophid:
353 params[b'repositoryPHID'] = repophid
355 params[b'repositoryPHID'] = repophid
354 diff = callconduit(repo, b'differential.createrawdiff', params)
356 diff = callconduit(repo, b'differential.createrawdiff', params)
355 if not diff:
357 if not diff:
356 raise error.Abort(_(b'cannot create diff for %s') % ctx)
358 raise error.Abort(_(b'cannot create diff for %s') % ctx)
357 return diff
359 return diff
358
360
359 def writediffproperties(ctx, diff):
361 def writediffproperties(ctx, diff):
360 """write metadata to diff so patches could be applied losslessly"""
362 """write metadata to diff so patches could be applied losslessly"""
361 params = {
363 params = {
362 b'diff_id': diff[r'id'],
364 b'diff_id': diff[r'id'],
363 b'name': b'hg:meta',
365 b'name': b'hg:meta',
364 b'data': json.dumps({
366 b'data': json.dumps({
365 b'user': ctx.user(),
367 b'user': ctx.user(),
366 b'date': b'%d %d' % ctx.date(),
368 b'date': b'%d %d' % ctx.date(),
367 b'node': ctx.hex(),
369 b'node': ctx.hex(),
368 b'parent': ctx.p1().hex(),
370 b'parent': ctx.p1().hex(),
369 }),
371 }),
370 }
372 }
371 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
373 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
372
374
373 params = {
375 params = {
374 b'diff_id': diff[r'id'],
376 b'diff_id': diff[r'id'],
375 b'name': b'local:commits',
377 b'name': b'local:commits',
376 b'data': json.dumps({
378 b'data': json.dumps({
377 ctx.hex(): {
379 ctx.hex(): {
378 b'author': stringutil.person(ctx.user()),
380 b'author': stringutil.person(ctx.user()),
379 b'authorEmail': stringutil.email(ctx.user()),
381 b'authorEmail': stringutil.email(ctx.user()),
380 b'time': ctx.date()[0],
382 b'time': ctx.date()[0],
381 },
383 },
382 }),
384 }),
383 }
385 }
384 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
386 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
385
387
386 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
388 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
387 olddiff=None, actions=None):
389 olddiff=None, actions=None):
388 """create or update a Differential Revision
390 """create or update a Differential Revision
389
391
390 If revid is None, create a new Differential Revision, otherwise update
392 If revid is None, create a new Differential Revision, otherwise update
391 revid. If parentrevid is not None, set it as a dependency.
393 revid. If parentrevid is not None, set it as a dependency.
392
394
393 If oldnode is not None, check if the patch content (without commit message
395 If oldnode is not None, check if the patch content (without commit message
394 and metadata) has changed before creating another diff.
396 and metadata) has changed before creating another diff.
395
397
396 If actions is not None, they will be appended to the transaction.
398 If actions is not None, they will be appended to the transaction.
397 """
399 """
398 repo = ctx.repo()
400 repo = ctx.repo()
399 if oldnode:
401 if oldnode:
400 diffopts = mdiff.diffopts(git=True, context=32767)
402 diffopts = mdiff.diffopts(git=True, context=32767)
401 oldctx = repo.unfiltered()[oldnode]
403 oldctx = repo.unfiltered()[oldnode]
402 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
404 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
403 else:
405 else:
404 neednewdiff = True
406 neednewdiff = True
405
407
406 transactions = []
408 transactions = []
407 if neednewdiff:
409 if neednewdiff:
408 diff = creatediff(ctx)
410 diff = creatediff(ctx)
409 transactions.append({b'type': b'update', b'value': diff[r'phid']})
411 transactions.append({b'type': b'update', b'value': diff[r'phid']})
410 else:
412 else:
411 # Even if we don't need to upload a new diff because the patch content
413 # Even if we don't need to upload a new diff because the patch content
412 # does not change. We might still need to update its metadata so
414 # does not change. We might still need to update its metadata so
413 # pushers could know the correct node metadata.
415 # pushers could know the correct node metadata.
414 assert olddiff
416 assert olddiff
415 diff = olddiff
417 diff = olddiff
416 writediffproperties(ctx, diff)
418 writediffproperties(ctx, diff)
417
419
418 # Use a temporary summary to set dependency. There might be better ways but
420 # Use a temporary summary to set dependency. There might be better ways but
419 # I cannot find them for now. But do not do that if we are updating an
421 # I cannot find them for now. But do not do that if we are updating an
420 # existing revision (revid is not None) since that introduces visible
422 # existing revision (revid is not None) since that introduces visible
421 # churns (someone edited "Summary" twice) on the web page.
423 # churns (someone edited "Summary" twice) on the web page.
422 if parentrevid and revid is None:
424 if parentrevid and revid is None:
423 summary = b'Depends on D%s' % parentrevid
425 summary = b'Depends on D%s' % parentrevid
424 transactions += [{b'type': b'summary', b'value': summary},
426 transactions += [{b'type': b'summary', b'value': summary},
425 {b'type': b'summary', b'value': b' '}]
427 {b'type': b'summary', b'value': b' '}]
426
428
427 if actions:
429 if actions:
428 transactions += actions
430 transactions += actions
429
431
430 # Parse commit message and update related fields.
432 # Parse commit message and update related fields.
431 desc = ctx.description()
433 desc = ctx.description()
432 info = callconduit(repo, b'differential.parsecommitmessage',
434 info = callconduit(repo, b'differential.parsecommitmessage',
433 {b'corpus': desc})
435 {b'corpus': desc})
434 for k, v in info[r'fields'].items():
436 for k, v in info[r'fields'].items():
435 if k in [b'title', b'summary', b'testPlan']:
437 if k in [b'title', b'summary', b'testPlan']:
436 transactions.append({b'type': k, b'value': v})
438 transactions.append({b'type': k, b'value': v})
437
439
438 params = {b'transactions': transactions}
440 params = {b'transactions': transactions}
439 if revid is not None:
441 if revid is not None:
440 # Update an existing Differential Revision
442 # Update an existing Differential Revision
441 params[b'objectIdentifier'] = revid
443 params[b'objectIdentifier'] = revid
442
444
443 revision = callconduit(repo, b'differential.revision.edit', params)
445 revision = callconduit(repo, b'differential.revision.edit', params)
444 if not revision:
446 if not revision:
445 raise error.Abort(_(b'cannot create revision for %s') % ctx)
447 raise error.Abort(_(b'cannot create revision for %s') % ctx)
446
448
447 return revision, diff
449 return revision, diff
448
450
449 def userphids(repo, names):
451 def userphids(repo, names):
450 """convert user names to PHIDs"""
452 """convert user names to PHIDs"""
451 query = {b'constraints': {b'usernames': names}}
453 query = {b'constraints': {b'usernames': names}}
452 result = callconduit(repo, b'user.search', query)
454 result = callconduit(repo, b'user.search', query)
453 # username not found is not an error of the API. So check if we have missed
455 # username not found is not an error of the API. So check if we have missed
454 # some names here.
456 # some names here.
455 data = result[r'data']
457 data = result[r'data']
456 resolved = set(entry[r'fields'][r'username'] for entry in data)
458 resolved = set(entry[r'fields'][r'username'] for entry in data)
457 unresolved = set(names) - resolved
459 unresolved = set(names) - resolved
458 if unresolved:
460 if unresolved:
459 raise error.Abort(_(b'unknown username: %s')
461 raise error.Abort(_(b'unknown username: %s')
460 % b' '.join(sorted(unresolved)))
462 % b' '.join(sorted(unresolved)))
461 return [entry[r'phid'] for entry in data]
463 return [entry[r'phid'] for entry in data]
462
464
463 @vcrcommand(b'phabsend',
465 @vcrcommand(b'phabsend',
464 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
466 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
465 (b'', b'amend', True, _(b'update commit messages')),
467 (b'', b'amend', True, _(b'update commit messages')),
466 (b'', b'reviewer', [], _(b'specify reviewers')),
468 (b'', b'reviewer', [], _(b'specify reviewers')),
467 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
469 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
468 _(b'REV [OPTIONS]'),
470 _(b'REV [OPTIONS]'),
469 helpcategory=command.CATEGORY_IMPORT_EXPORT)
471 helpcategory=command.CATEGORY_IMPORT_EXPORT)
470 def phabsend(ui, repo, *revs, **opts):
472 def phabsend(ui, repo, *revs, **opts):
471 """upload changesets to Phabricator
473 """upload changesets to Phabricator
472
474
473 If there are multiple revisions specified, they will be send as a stack
475 If there are multiple revisions specified, they will be send as a stack
474 with a linear dependencies relationship using the order specified by the
476 with a linear dependencies relationship using the order specified by the
475 revset.
477 revset.
476
478
477 For the first time uploading changesets, local tags will be created to
479 For the first time uploading changesets, local tags will be created to
478 maintain the association. After the first time, phabsend will check
480 maintain the association. After the first time, phabsend will check
479 obsstore and tags information so it can figure out whether to update an
481 obsstore and tags information so it can figure out whether to update an
480 existing Differential Revision, or create a new one.
482 existing Differential Revision, or create a new one.
481
483
482 If --amend is set, update commit messages so they have the
484 If --amend is set, update commit messages so they have the
483 ``Differential Revision`` URL, remove related tags. This is similar to what
485 ``Differential Revision`` URL, remove related tags. This is similar to what
484 arcanist will do, and is more desired in author-push workflows. Otherwise,
486 arcanist will do, and is more desired in author-push workflows. Otherwise,
485 use local tags to record the ``Differential Revision`` association.
487 use local tags to record the ``Differential Revision`` association.
486
488
487 The --confirm option lets you confirm changesets before sending them. You
489 The --confirm option lets you confirm changesets before sending them. You
488 can also add following to your configuration file to make it default
490 can also add following to your configuration file to make it default
489 behaviour::
491 behaviour::
490
492
491 [phabsend]
493 [phabsend]
492 confirm = true
494 confirm = true
493
495
494 phabsend will check obsstore and the above association to decide whether to
496 phabsend will check obsstore and the above association to decide whether to
495 update an existing Differential Revision, or create a new one.
497 update an existing Differential Revision, or create a new one.
496 """
498 """
497 revs = list(revs) + opts.get(b'rev', [])
499 revs = list(revs) + opts.get(b'rev', [])
498 revs = scmutil.revrange(repo, revs)
500 revs = scmutil.revrange(repo, revs)
499
501
500 if not revs:
502 if not revs:
501 raise error.Abort(_(b'phabsend requires at least one changeset'))
503 raise error.Abort(_(b'phabsend requires at least one changeset'))
502 if opts.get(b'amend'):
504 if opts.get(b'amend'):
503 cmdutil.checkunfinished(repo)
505 cmdutil.checkunfinished(repo)
504
506
505 # {newnode: (oldnode, olddiff, olddrev}
507 # {newnode: (oldnode, olddiff, olddrev}
506 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
508 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
507
509
508 confirm = ui.configbool(b'phabsend', b'confirm')
510 confirm = ui.configbool(b'phabsend', b'confirm')
509 confirm |= bool(opts.get(b'confirm'))
511 confirm |= bool(opts.get(b'confirm'))
510 if confirm:
512 if confirm:
511 confirmed = _confirmbeforesend(repo, revs, oldmap)
513 confirmed = _confirmbeforesend(repo, revs, oldmap)
512 if not confirmed:
514 if not confirmed:
513 raise error.Abort(_(b'phabsend cancelled'))
515 raise error.Abort(_(b'phabsend cancelled'))
514
516
515 actions = []
517 actions = []
516 reviewers = opts.get(b'reviewer', [])
518 reviewers = opts.get(b'reviewer', [])
517 if reviewers:
519 if reviewers:
518 phids = userphids(repo, reviewers)
520 phids = userphids(repo, reviewers)
519 actions.append({b'type': b'reviewers.add', b'value': phids})
521 actions.append({b'type': b'reviewers.add', b'value': phids})
520
522
521 drevids = [] # [int]
523 drevids = [] # [int]
522 diffmap = {} # {newnode: diff}
524 diffmap = {} # {newnode: diff}
523
525
524 # Send patches one by one so we know their Differential Revision IDs and
526 # Send patches one by one so we know their Differential Revision IDs and
525 # can provide dependency relationship
527 # can provide dependency relationship
526 lastrevid = None
528 lastrevid = None
527 for rev in revs:
529 for rev in revs:
528 ui.debug(b'sending rev %d\n' % rev)
530 ui.debug(b'sending rev %d\n' % rev)
529 ctx = repo[rev]
531 ctx = repo[rev]
530
532
531 # Get Differential Revision ID
533 # Get Differential Revision ID
532 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
534 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
533 if oldnode != ctx.node() or opts.get(b'amend'):
535 if oldnode != ctx.node() or opts.get(b'amend'):
534 # Create or update Differential Revision
536 # Create or update Differential Revision
535 revision, diff = createdifferentialrevision(
537 revision, diff = createdifferentialrevision(
536 ctx, revid, lastrevid, oldnode, olddiff, actions)
538 ctx, revid, lastrevid, oldnode, olddiff, actions)
537 diffmap[ctx.node()] = diff
539 diffmap[ctx.node()] = diff
538 newrevid = int(revision[r'object'][r'id'])
540 newrevid = int(revision[r'object'][r'id'])
539 if revid:
541 if revid:
540 action = b'updated'
542 action = b'updated'
541 else:
543 else:
542 action = b'created'
544 action = b'created'
543
545
544 # Create a local tag to note the association, if commit message
546 # Create a local tag to note the association, if commit message
545 # does not have it already
547 # does not have it already
546 m = _differentialrevisiondescre.search(ctx.description())
548 m = _differentialrevisiondescre.search(ctx.description())
547 if not m or int(m.group(b'id')) != newrevid:
549 if not m or int(m.group(b'id')) != newrevid:
548 tagname = b'D%d' % newrevid
550 tagname = b'D%d' % newrevid
549 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
551 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
550 date=None, local=True)
552 date=None, local=True)
551 else:
553 else:
552 # Nothing changed. But still set "newrevid" so the next revision
554 # Nothing changed. But still set "newrevid" so the next revision
553 # could depend on this one.
555 # could depend on this one.
554 newrevid = revid
556 newrevid = revid
555 action = b'skipped'
557 action = b'skipped'
556
558
557 actiondesc = ui.label(
559 actiondesc = ui.label(
558 {b'created': _(b'created'),
560 {b'created': _(b'created'),
559 b'skipped': _(b'skipped'),
561 b'skipped': _(b'skipped'),
560 b'updated': _(b'updated')}[action],
562 b'updated': _(b'updated')}[action],
561 b'phabricator.action.%s' % action)
563 b'phabricator.action.%s' % action)
562 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
564 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
563 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
565 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
564 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
566 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
565 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
567 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
566 desc))
568 desc))
567 drevids.append(newrevid)
569 drevids.append(newrevid)
568 lastrevid = newrevid
570 lastrevid = newrevid
569
571
570 # Update commit messages and remove tags
572 # Update commit messages and remove tags
571 if opts.get(b'amend'):
573 if opts.get(b'amend'):
572 unfi = repo.unfiltered()
574 unfi = repo.unfiltered()
573 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
575 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
574 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
576 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
575 wnode = unfi[b'.'].node()
577 wnode = unfi[b'.'].node()
576 mapping = {} # {oldnode: [newnode]}
578 mapping = {} # {oldnode: [newnode]}
577 for i, rev in enumerate(revs):
579 for i, rev in enumerate(revs):
578 old = unfi[rev]
580 old = unfi[rev]
579 drevid = drevids[i]
581 drevid = drevids[i]
580 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
582 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
581 newdesc = getdescfromdrev(drev)
583 newdesc = getdescfromdrev(drev)
582 newdesc = encoding.unitolocal(newdesc)
584 newdesc = encoding.unitolocal(newdesc)
583 # Make sure commit message contain "Differential Revision"
585 # Make sure commit message contain "Differential Revision"
584 if old.description() != newdesc:
586 if old.description() != newdesc:
585 parents = [
587 parents = [
586 mapping.get(old.p1().node(), (old.p1(),))[0],
588 mapping.get(old.p1().node(), (old.p1(),))[0],
587 mapping.get(old.p2().node(), (old.p2(),))[0],
589 mapping.get(old.p2().node(), (old.p2(),))[0],
588 ]
590 ]
589 new = context.metadataonlyctx(
591 new = context.metadataonlyctx(
590 repo, old, parents=parents, text=newdesc,
592 repo, old, parents=parents, text=newdesc,
591 user=old.user(), date=old.date(), extra=old.extra())
593 user=old.user(), date=old.date(), extra=old.extra())
592
594
593 newnode = new.commit()
595 newnode = new.commit()
594
596
595 mapping[old.node()] = [newnode]
597 mapping[old.node()] = [newnode]
596 # Update diff property
598 # Update diff property
597 writediffproperties(unfi[newnode], diffmap[old.node()])
599 writediffproperties(unfi[newnode], diffmap[old.node()])
598 # Remove local tags since it's no longer necessary
600 # Remove local tags since it's no longer necessary
599 tagname = b'D%d' % drevid
601 tagname = b'D%d' % drevid
600 if tagname in repo.tags():
602 if tagname in repo.tags():
601 tags.tag(repo, tagname, nullid, message=None, user=None,
603 tags.tag(repo, tagname, nullid, message=None, user=None,
602 date=None, local=True)
604 date=None, local=True)
603 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
605 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
604 if wnode in mapping:
606 if wnode in mapping:
605 unfi.setparents(mapping[wnode][0])
607 unfi.setparents(mapping[wnode][0])
606
608
607 # Map from "hg:meta" keys to header understood by "hg import". The order is
609 # Map from "hg:meta" keys to header understood by "hg import". The order is
608 # consistent with "hg export" output.
610 # consistent with "hg export" output.
609 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
611 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
610 (r'node', b'Node ID'), (r'parent', b'Parent ')])
612 (r'node', b'Node ID'), (r'parent', b'Parent ')])
611
613
612 def _confirmbeforesend(repo, revs, oldmap):
614 def _confirmbeforesend(repo, revs, oldmap):
613 url, token = readurltoken(repo)
615 url, token = readurltoken(repo)
614 ui = repo.ui
616 ui = repo.ui
615 for rev in revs:
617 for rev in revs:
616 ctx = repo[rev]
618 ctx = repo[rev]
617 desc = ctx.description().splitlines()[0]
619 desc = ctx.description().splitlines()[0]
618 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
620 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
619 if drevid:
621 if drevid:
620 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
622 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
621 else:
623 else:
622 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
624 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
623
625
624 ui.write(_(b'%s - %s: %s\n')
626 ui.write(_(b'%s - %s: %s\n')
625 % (drevdesc,
627 % (drevdesc,
626 ui.label(bytes(ctx), b'phabricator.node'),
628 ui.label(bytes(ctx), b'phabricator.node'),
627 ui.label(desc, b'phabricator.desc')))
629 ui.label(desc, b'phabricator.desc')))
628
630
629 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
631 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
630 b'$$ &Yes $$ &No') % url):
632 b'$$ &Yes $$ &No') % url):
631 return False
633 return False
632
634
633 return True
635 return True
634
636
635 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
637 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
636 b'abandoned'}
638 b'abandoned'}
637
639
638 def _getstatusname(drev):
640 def _getstatusname(drev):
639 """get normalized status name from a Differential Revision"""
641 """get normalized status name from a Differential Revision"""
640 return drev[r'statusName'].replace(b' ', b'').lower()
642 return drev[r'statusName'].replace(b' ', b'').lower()
641
643
642 # Small language to specify differential revisions. Support symbols: (), :X,
644 # Small language to specify differential revisions. Support symbols: (), :X,
643 # +, and -.
645 # +, and -.
644
646
645 _elements = {
647 _elements = {
646 # token-type: binding-strength, primary, prefix, infix, suffix
648 # token-type: binding-strength, primary, prefix, infix, suffix
647 b'(': (12, None, (b'group', 1, b')'), None, None),
649 b'(': (12, None, (b'group', 1, b')'), None, None),
648 b':': (8, None, (b'ancestors', 8), None, None),
650 b':': (8, None, (b'ancestors', 8), None, None),
649 b'&': (5, None, None, (b'and_', 5), None),
651 b'&': (5, None, None, (b'and_', 5), None),
650 b'+': (4, None, None, (b'add', 4), None),
652 b'+': (4, None, None, (b'add', 4), None),
651 b'-': (4, None, None, (b'sub', 4), None),
653 b'-': (4, None, None, (b'sub', 4), None),
652 b')': (0, None, None, None, None),
654 b')': (0, None, None, None, None),
653 b'symbol': (0, b'symbol', None, None, None),
655 b'symbol': (0, b'symbol', None, None, None),
654 b'end': (0, None, None, None, None),
656 b'end': (0, None, None, None, None),
655 }
657 }
656
658
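Illustrative spec strings for this small language (interpreted by ``querydrev`` below; the revision numbers are examples only):

    #   ':D123'               -> D123 plus the revisions it depends on
    #   'D123 + D456'         -> union of the two revisions
    #   ':D123 - needsreview' -> the stack of D123, minus "Needs Review" ones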
657 def _tokenize(text):
659 def _tokenize(text):
658 view = memoryview(text) # zero-copy slice
660 view = memoryview(text) # zero-copy slice
659 special = b'():+-& '
661 special = b'():+-& '
660 pos = 0
662 pos = 0
661 length = len(text)
663 length = len(text)
662 while pos < length:
664 while pos < length:
663 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
665 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
664 view[pos:]))
666 view[pos:]))
665 if symbol:
667 if symbol:
666 yield (b'symbol', symbol, pos)
668 yield (b'symbol', symbol, pos)
667 pos += len(symbol)
669 pos += len(symbol)
668 else: # special char, ignore space
670 else: # special char, ignore space
669 if text[pos] != b' ':
671 if text[pos] != b' ':
670 yield (text[pos], None, pos)
672 yield (text[pos], None, pos)
671 pos += 1
673 pos += 1
672 yield (b'end', None, pos)
674 yield (b'end', None, pos)
673
675
674 def _parse(text):
676 def _parse(text):
675 tree, pos = parser.parser(_elements).parse(_tokenize(text))
677 tree, pos = parser.parser(_elements).parse(_tokenize(text))
676 if pos != len(text):
678 if pos != len(text):
677 raise error.ParseError(b'invalid token', pos)
679 raise error.ParseError(b'invalid token', pos)
678 return tree
680 return tree
679
681
680 def _parsedrev(symbol):
682 def _parsedrev(symbol):
681 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
683 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
682 if symbol.startswith(b'D') and symbol[1:].isdigit():
684 if symbol.startswith(b'D') and symbol[1:].isdigit():
683 return int(symbol[1:])
685 return int(symbol[1:])
684 if symbol.isdigit():
686 if symbol.isdigit():
685 return int(symbol)
687 return int(symbol)
686
688
687 def _prefetchdrevs(tree):
689 def _prefetchdrevs(tree):
688 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
690 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
689 drevs = set()
691 drevs = set()
690 ancestordrevs = set()
692 ancestordrevs = set()
691 op = tree[0]
693 op = tree[0]
692 if op == b'symbol':
694 if op == b'symbol':
693 r = _parsedrev(tree[1])
695 r = _parsedrev(tree[1])
694 if r:
696 if r:
695 drevs.add(r)
697 drevs.add(r)
696 elif op == b'ancestors':
698 elif op == b'ancestors':
697 r, a = _prefetchdrevs(tree[1])
699 r, a = _prefetchdrevs(tree[1])
698 drevs.update(r)
700 drevs.update(r)
699 ancestordrevs.update(r)
701 ancestordrevs.update(r)
700 ancestordrevs.update(a)
702 ancestordrevs.update(a)
701 else:
703 else:
702 for t in tree[1:]:
704 for t in tree[1:]:
703 r, a = _prefetchdrevs(t)
705 r, a = _prefetchdrevs(t)
704 drevs.update(r)
706 drevs.update(r)
705 ancestordrevs.update(a)
707 ancestordrevs.update(a)
706 return drevs, ancestordrevs
708 return drevs, ancestordrevs
707
709
708 def querydrev(repo, spec):
710 def querydrev(repo, spec):
709 """return a list of "Differential Revision" dicts
711 """return a list of "Differential Revision" dicts
710
712
711 spec is a string using a simple query language, see docstring in phabread
713 spec is a string using a simple query language, see docstring in phabread
712 for details.
714 for details.
713
715
714 A "Differential Revision dict" looks like:
716 A "Differential Revision dict" looks like:
715
717
716 {
718 {
717 "id": "2",
719 "id": "2",
718 "phid": "PHID-DREV-672qvysjcczopag46qty",
720 "phid": "PHID-DREV-672qvysjcczopag46qty",
719 "title": "example",
721 "title": "example",
720 "uri": "https://phab.example.com/D2",
722 "uri": "https://phab.example.com/D2",
721 "dateCreated": "1499181406",
723 "dateCreated": "1499181406",
722 "dateModified": "1499182103",
724 "dateModified": "1499182103",
723 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
725 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
724 "status": "0",
726 "status": "0",
725 "statusName": "Needs Review",
727 "statusName": "Needs Review",
726 "properties": [],
728 "properties": [],
727 "branch": null,
729 "branch": null,
728 "summary": "",
730 "summary": "",
729 "testPlan": "",
731 "testPlan": "",
730 "lineCount": "2",
732 "lineCount": "2",
731 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
733 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
732 "diffs": [
734 "diffs": [
733 "3",
735 "3",
734 "4",
736 "4",
735 ],
737 ],
736 "commits": [],
738 "commits": [],
737 "reviewers": [],
739 "reviewers": [],
738 "ccs": [],
740 "ccs": [],
739 "hashes": [],
741 "hashes": [],
740 "auxiliary": {
742 "auxiliary": {
741 "phabricator:projects": [],
743 "phabricator:projects": [],
742 "phabricator:depends-on": [
744 "phabricator:depends-on": [
743 "PHID-DREV-gbapp366kutjebt7agcd"
745 "PHID-DREV-gbapp366kutjebt7agcd"
744 ]
746 ]
745 },
747 },
746 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
748 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
747 "sourcePath": null
749 "sourcePath": null
748 }
750 }
749 """
751 """
750 def fetch(params):
752 def fetch(params):
751 """params -> single drev or None"""
753 """params -> single drev or None"""
752 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
754 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
753 if key in prefetched:
755 if key in prefetched:
754 return prefetched[key]
756 return prefetched[key]
755 drevs = callconduit(repo, b'differential.query', params)
757 drevs = callconduit(repo, b'differential.query', params)
756 # Fill prefetched with the result
758 # Fill prefetched with the result
757 for drev in drevs:
759 for drev in drevs:
758 prefetched[drev[r'phid']] = drev
760 prefetched[drev[r'phid']] = drev
759 prefetched[int(drev[r'id'])] = drev
761 prefetched[int(drev[r'id'])] = drev
760 if key not in prefetched:
762 if key not in prefetched:
761 raise error.Abort(_(b'cannot get Differential Revision %r')
763 raise error.Abort(_(b'cannot get Differential Revision %r')
762 % params)
764 % params)
763 return prefetched[key]
765 return prefetched[key]
764
766
765 def getstack(topdrevids):
767 def getstack(topdrevids):
766 """given a top, get a stack from the bottom, [id] -> [id]"""
768 """given a top, get a stack from the bottom, [id] -> [id]"""
767 visited = set()
769 visited = set()
768 result = []
770 result = []
769 queue = [{r'ids': [i]} for i in topdrevids]
771 queue = [{r'ids': [i]} for i in topdrevids]
770 while queue:
772 while queue:
771 params = queue.pop()
773 params = queue.pop()
772 drev = fetch(params)
774 drev = fetch(params)
773 if drev[r'id'] in visited:
775 if drev[r'id'] in visited:
774 continue
776 continue
775 visited.add(drev[r'id'])
777 visited.add(drev[r'id'])
776 result.append(int(drev[r'id']))
778 result.append(int(drev[r'id']))
777 auxiliary = drev.get(r'auxiliary', {})
779 auxiliary = drev.get(r'auxiliary', {})
778 depends = auxiliary.get(r'phabricator:depends-on', [])
780 depends = auxiliary.get(r'phabricator:depends-on', [])
779 for phid in depends:
781 for phid in depends:
780 queue.append({b'phids': [phid]})
782 queue.append({b'phids': [phid]})
781 result.reverse()
783 result.reverse()
782 return smartset.baseset(result)
784 return smartset.baseset(result)
783
785
784 # Initialize prefetch cache
786 # Initialize prefetch cache
785 prefetched = {} # {id or phid: drev}
787 prefetched = {} # {id or phid: drev}
786
788
787 tree = _parse(spec)
789 tree = _parse(spec)
788 drevs, ancestordrevs = _prefetchdrevs(tree)
790 drevs, ancestordrevs = _prefetchdrevs(tree)
789
791
790 # developer config: phabricator.batchsize
792 # developer config: phabricator.batchsize
791 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
793 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
792
794
793 # Prefetch Differential Revisions in batch
795 # Prefetch Differential Revisions in batch
794 tofetch = set(drevs)
796 tofetch = set(drevs)
795 for r in ancestordrevs:
797 for r in ancestordrevs:
796 tofetch.update(range(max(1, r - batchsize), r + 1))
798 tofetch.update(range(max(1, r - batchsize), r + 1))
797 if drevs:
799 if drevs:
798 fetch({r'ids': list(tofetch)})
800 fetch({r'ids': list(tofetch)})
799 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
801 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
800
802
801 # Walk through the tree, return smartsets
803 # Walk through the tree, return smartsets
802 def walk(tree):
804 def walk(tree):
803 op = tree[0]
805 op = tree[0]
804 if op == b'symbol':
806 if op == b'symbol':
805 drev = _parsedrev(tree[1])
807 drev = _parsedrev(tree[1])
806 if drev:
808 if drev:
807 return smartset.baseset([drev])
809 return smartset.baseset([drev])
808 elif tree[1] in _knownstatusnames:
810 elif tree[1] in _knownstatusnames:
809 drevs = [r for r in validids
811 drevs = [r for r in validids
810 if _getstatusname(prefetched[r]) == tree[1]]
812 if _getstatusname(prefetched[r]) == tree[1]]
811 return smartset.baseset(drevs)
813 return smartset.baseset(drevs)
812 else:
814 else:
813 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
815 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
814 elif op in {b'and_', b'add', b'sub'}:
816 elif op in {b'and_', b'add', b'sub'}:
815 assert len(tree) == 3
817 assert len(tree) == 3
816 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
818 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
817 elif op == b'group':
819 elif op == b'group':
818 return walk(tree[1])
820 return walk(tree[1])
819 elif op == b'ancestors':
821 elif op == b'ancestors':
820 return getstack(walk(tree[1]))
822 return getstack(walk(tree[1]))
821 else:
823 else:
822 raise error.ProgrammingError(b'illegal tree: %r' % tree)
824 raise error.ProgrammingError(b'illegal tree: %r' % tree)
823
825
824 return [prefetched[r] for r in walk(tree)]
826 return [prefetched[r] for r in walk(tree)]
825
827
826 def getdescfromdrev(drev):
828 def getdescfromdrev(drev):
827 """get description (commit message) from "Differential Revision"
829 """get description (commit message) from "Differential Revision"
828
830
829 This is similar to differential.getcommitmessage API. But we only care
831 This is similar to differential.getcommitmessage API. But we only care
830 about limited fields: title, summary, test plan, and URL.
832 about limited fields: title, summary, test plan, and URL.
831 """
833 """
832 title = drev[r'title']
834 title = drev[r'title']
833 summary = drev[r'summary'].rstrip()
835 summary = drev[r'summary'].rstrip()
834 testplan = drev[r'testPlan'].rstrip()
836 testplan = drev[r'testPlan'].rstrip()
835 if testplan:
837 if testplan:
836 testplan = b'Test Plan:\n%s' % testplan
838 testplan = b'Test Plan:\n%s' % testplan
837 uri = b'Differential Revision: %s' % drev[r'uri']
839 uri = b'Differential Revision: %s' % drev[r'uri']
838 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
840 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
839
841
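The joined message therefore has the following shape; sections that are empty on the Differential Revision are simply dropped, and the values shown are placeholders:

    <title>

    <summary>

    Test Plan:
    <test plan>

    Differential Revision: https://phab.example.com/D2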
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    if not meta and props.get(r'local:commits'):
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}

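# Worked example (added commentary, not from the original source): for the
# "local:commits" payload shown in the docstring above, getdiffmeta() returns
# roughly
#
#   {
#       'date': '1499546314 0',   # time zone information is lost
#       'node': '98c08acae292b2faf60a279b4189beb6cff1414d',
#       'user': 'Foo Bar <foo@example.com>',
#       'parent': '6d0abad76b30e4724a37ab8721d630394070fe16',
#   }
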
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[r'id'])

        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))

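# Example output shape (added commentary, not from the original source):
# assuming _metanamemap maps the hg:meta keys user/date/node/parent to the
# usual hg patch header names, the emitted text looks roughly like
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev()>
#   <raw diff body from differential.getrawdiff>
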
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

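# Typical usage (added commentary, not from the original source):
#
#   $ hg phabread D123 | hg import -          # apply a single revision
#   $ hg phabread --stack D123 | hg import -  # apply D123 plus dependencies
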
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[r'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

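# Typical usage (added commentary, not from the original source):
#
#   $ hg phabupdate --accept :D6 -m 'Queued.'  # accept D6 and its ancestors;
#                                              # the comment goes on the last
#   $ hg phabupdate --abandon D7               # abandon a single revision
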
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            b'id': b"D{}".format(m.group(b'id')),
        })
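
# Template usage (added commentary, not from the original source): a commit
# whose message carries a "Differential Revision:" URL can be queried like
#
#   $ hg log -r . -T '{phabreview.url}\n'
#
# which would print the recorded URL, e.g. https://phab.example.com/D123.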