py3: use fsencode for vcr recording paths and strings for custom_patches args...
Ian Moody
r42064:2bad8f92 default
@@ -1,1017 +1,1018 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changesets from being sent. The requirement could be disabled by changing
15 changesets from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templateutil,
68 templateutil,
69 url as urlmod,
69 url as urlmod,
70 util,
70 util,
71 )
71 )
72 from mercurial.utils import (
72 from mercurial.utils import (
73 procutil,
73 procutil,
74 stringutil,
74 stringutil,
75 )
75 )
76
76
77 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # be specifying the version(s) of Mercurial they are tested with, or
79 # be specifying the version(s) of Mercurial they are tested with, or
80 # leave the attribute unspecified.
80 # leave the attribute unspecified.
81 testedwith = 'ships-with-hg-core'
81 testedwith = 'ships-with-hg-core'
82
82
83 cmdtable = {}
83 cmdtable = {}
84 command = registrar.command(cmdtable)
84 command = registrar.command(cmdtable)
85
85
86 configtable = {}
86 configtable = {}
87 configitem = registrar.configitem(configtable)
87 configitem = registrar.configitem(configtable)
88
88
89 # developer config: phabricator.batchsize
89 # developer config: phabricator.batchsize
90 configitem(b'phabricator', b'batchsize',
90 configitem(b'phabricator', b'batchsize',
91 default=12,
91 default=12,
92 )
92 )
93 configitem(b'phabricator', b'callsign',
93 configitem(b'phabricator', b'callsign',
94 default=None,
94 default=None,
95 )
95 )
96 configitem(b'phabricator', b'curlcmd',
96 configitem(b'phabricator', b'curlcmd',
97 default=None,
97 default=None,
98 )
98 )
99 # developer config: phabricator.repophid
99 # developer config: phabricator.repophid
100 configitem(b'phabricator', b'repophid',
100 configitem(b'phabricator', b'repophid',
101 default=None,
101 default=None,
102 )
102 )
103 configitem(b'phabricator', b'url',
103 configitem(b'phabricator', b'url',
104 default=None,
104 default=None,
105 )
105 )
106 configitem(b'phabsend', b'confirm',
106 configitem(b'phabsend', b'confirm',
107 default=False,
107 default=False,
108 )
108 )
109
109
110 colortable = {
110 colortable = {
111 b'phabricator.action.created': b'green',
111 b'phabricator.action.created': b'green',
112 b'phabricator.action.skipped': b'magenta',
112 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.updated': b'magenta',
113 b'phabricator.action.updated': b'magenta',
114 b'phabricator.desc': b'',
114 b'phabricator.desc': b'',
115 b'phabricator.drev': b'bold',
115 b'phabricator.drev': b'bold',
116 b'phabricator.node': b'',
116 b'phabricator.node': b'',
117 }
117 }
118
118
119 _VCR_FLAGS = [
119 _VCR_FLAGS = [
120 (b'', b'test-vcr', b'',
120 (b'', b'test-vcr', b'',
121 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
121 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 b', otherwise will mock all http requests using the specified vcr file.'
122 b', otherwise will mock all http requests using the specified vcr file.'
123 b' (ADVANCED)'
123 b' (ADVANCED)'
124 )),
124 )),
125 ]
125 ]
126
126
127 def vcrcommand(name, flags, spec, helpcategory=None):
127 def vcrcommand(name, flags, spec, helpcategory=None):
128 fullflags = flags + _VCR_FLAGS
128 fullflags = flags + _VCR_FLAGS
129 def decorate(fn):
129 def decorate(fn):
130 def inner(*args, **kwargs):
130 def inner(*args, **kwargs):
131 - cassette = kwargs.pop(r'test_vcr', None)
131 + cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
132 if cassette:
132 if cassette:
133 import hgdemandimport
133 import hgdemandimport
134 with hgdemandimport.deactivated():
134 with hgdemandimport.deactivated():
135 import vcr as vcrmod
135 import vcr as vcrmod
136 import vcr.stubs as stubs
136 import vcr.stubs as stubs
137 vcr = vcrmod.VCR(
137 vcr = vcrmod.VCR(
138 serializer=r'json',
138 serializer=r'json',
139 custom_patches=[
139 custom_patches=[
140 - (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
141 - (urlmod, 'httpsconnection',
142 -  stubs.VCRHTTPSConnection),
140 + (urlmod, r'httpconnection',
141 +  stubs.VCRHTTPConnection),
142 + (urlmod, r'httpsconnection',
143 +  stubs.VCRHTTPSConnection),
143 ])
144 ])
144 with vcr.use_cassette(cassette):
145 with vcr.use_cassette(cassette):
145 return fn(*args, **kwargs)
146 return fn(*args, **kwargs)
146 return fn(*args, **kwargs)
147 return fn(*args, **kwargs)
147 inner.__name__ = fn.__name__
148 inner.__name__ = fn.__name__
148 inner.__doc__ = fn.__doc__
149 inner.__doc__ = fn.__doc__
149 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
150 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
150 return decorate
151 return decorate
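
The change above swaps the raw flag value for ``pycompat.fsdecode(...)`` and the patch target names for ``r''`` (native str) literals: on Python 3 the vcr library expects a str cassette path, and applying a custom patch by attribute name relies on setattr(), which rejects bytes names, which is presumably why the names are now native strings. A minimal standalone sketch of both points, standard library only (the names and path below are illustrative, not part of this extension):

    import os

    class Target(object):
        pass

    # setattr() needs a native str attribute name on Python 3; a bytes name
    # such as b'httpconnection' raises TypeError.
    setattr(Target, 'httpconnection', object())

    # Mercurial passes command-line flag values around as bytes, so a cassette
    # path has to be decoded to str before the vcr library sees it.
    cassette = os.fsdecode(b'phabsend.json')
    assert isinstance(cassette, str)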
151
152
152 def urlencodenested(params):
153 def urlencodenested(params):
153 """like urlencode, but works with nested parameters.
154 """like urlencode, but works with nested parameters.
154
155
155 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
156 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
156 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
157 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
157 urlencode. Note: the encoding is consistent with PHP's http_build_query.
158 urlencode. Note: the encoding is consistent with PHP's http_build_query.
158 """
159 """
159 flatparams = util.sortdict()
160 flatparams = util.sortdict()
160 def process(prefix, obj):
161 def process(prefix, obj):
161 if isinstance(obj, bool):
162 if isinstance(obj, bool):
162 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
163 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
163 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
164 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
164 if items is None:
165 if items is None:
165 flatparams[prefix] = obj
166 flatparams[prefix] = obj
166 else:
167 else:
167 for k, v in items(obj):
168 for k, v in items(obj):
168 if prefix:
169 if prefix:
169 process(b'%s[%s]' % (prefix, k), v)
170 process(b'%s[%s]' % (prefix, k), v)
170 else:
171 else:
171 process(k, v)
172 process(k, v)
172 process(b'', params)
173 process(b'', params)
173 return util.urlreq.urlencode(flatparams)
174 return util.urlreq.urlencode(flatparams)
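
As a standalone illustration of the flattening described in the docstring (plain str keys and a hypothetical flatten() helper, standard library only; the real code works on bytes and a sorted dict):

    from urllib.parse import urlencode

    def flatten(prefix, obj, out):
        # Lists become a[0], a[1], ...; dicts become d[e]; scalars are stored.
        if isinstance(obj, list):
            items = enumerate(obj)
        elif isinstance(obj, dict):
            items = obj.items()
        else:
            out[prefix] = obj
            return
        for k, v in items:
            flatten('%s[%s]' % (prefix, k) if prefix else str(k), v, out)

    flat = {}
    flatten('', {'a': ['b', 'c'], 'd': {'e': 'f'}}, flat)
    print(flat)             # {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}
    print(urlencode(flat))  # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f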
174
175
175 def readurltoken(repo):
176 def readurltoken(repo):
176 """return conduit url, token and make sure they exist
177 """return conduit url, token and make sure they exist
177
178
178 Currently read from [auth] config section. In the future, it might
179 Currently read from [auth] config section. In the future, it might
179 make sense to read from .arcconfig and .arcrc as well.
180 make sense to read from .arcconfig and .arcrc as well.
180 """
181 """
181 url = repo.ui.config(b'phabricator', b'url')
182 url = repo.ui.config(b'phabricator', b'url')
182 if not url:
183 if not url:
183 raise error.Abort(_(b'config %s.%s is required')
184 raise error.Abort(_(b'config %s.%s is required')
184 % (b'phabricator', b'url'))
185 % (b'phabricator', b'url'))
185
186
186 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
187 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
187 token = None
188 token = None
188
189
189 if res:
190 if res:
190 group, auth = res
191 group, auth = res
191
192
192 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
193 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
193
194
194 token = auth.get(b'phabtoken')
195 token = auth.get(b'phabtoken')
195
196
196 if not token:
197 if not token:
197 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
198 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
198 % (url,))
199 % (url,))
199
200
200 return url, token
201 return url, token
201
202
202 def callconduit(repo, name, params):
203 def callconduit(repo, name, params):
203 """call Conduit API, params is a dict. return json.loads result, or None"""
204 """call Conduit API, params is a dict. return json.loads result, or None"""
204 host, token = readurltoken(repo)
205 host, token = readurltoken(repo)
205 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
206 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
206 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
207 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
207 params = params.copy()
208 params = params.copy()
208 params[b'api.token'] = token
209 params[b'api.token'] = token
209 data = urlencodenested(params)
210 data = urlencodenested(params)
210 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
211 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
211 if curlcmd:
212 if curlcmd:
212 sin, sout = procutil.popen2(b'%s -d @- %s'
213 sin, sout = procutil.popen2(b'%s -d @- %s'
213 % (curlcmd, procutil.shellquote(url)))
214 % (curlcmd, procutil.shellquote(url)))
214 sin.write(data)
215 sin.write(data)
215 sin.close()
216 sin.close()
216 body = sout.read()
217 body = sout.read()
217 else:
218 else:
218 urlopener = urlmod.opener(repo.ui, authinfo)
219 urlopener = urlmod.opener(repo.ui, authinfo)
219 request = util.urlreq.request(url, data=data)
220 request = util.urlreq.request(url, data=data)
220 with contextlib.closing(urlopener.open(request)) as rsp:
221 with contextlib.closing(urlopener.open(request)) as rsp:
221 body = rsp.read()
222 body = rsp.read()
222 repo.ui.debug(b'Conduit Response: %s\n' % body)
223 repo.ui.debug(b'Conduit Response: %s\n' % body)
223 parsed = pycompat.rapply(
224 parsed = pycompat.rapply(
224 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
225 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
225 else x,
226 else x,
226 json.loads(body)
227 json.loads(body)
227 )
228 )
228 if parsed.get(b'error_code'):
229 if parsed.get(b'error_code'):
229 msg = (_(b'Conduit Error (%s): %s')
230 msg = (_(b'Conduit Error (%s): %s')
230 % (parsed[b'error_code'], parsed[b'error_info']))
231 % (parsed[b'error_code'], parsed[b'error_info']))
231 raise error.Abort(msg)
232 raise error.Abort(msg)
232 return parsed[b'result']
233 return parsed[b'result']
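
For a rough picture of the request this builds, here is a standalone sketch of the URL and form-encoded body; the host and token values are invented and nothing is actually sent:

    from urllib.parse import urlencode

    host = 'https://phab.example.com'           # phabricator.url
    token = 'cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx'  # [auth] example.phabtoken

    url = '/'.join([host, 'api', 'differential.query'])
    body = urlencode({'ids[0]': '2', 'api.token': token})
    print(url)   # https://phab.example.com/api/differential.query
    print(body)  # the POST body, i.e. what the curlcmd path pipes to curl -d @-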
233
234
234 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
235 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
235 def debugcallconduit(ui, repo, name):
236 def debugcallconduit(ui, repo, name):
236 """call Conduit API
237 """call Conduit API
237
238
238 Call parameters are read from stdin as a JSON blob. Result will be written
239 Call parameters are read from stdin as a JSON blob. Result will be written
239 to stdout as a JSON blob.
240 to stdout as a JSON blob.
240 """
241 """
241 params = json.loads(ui.fin.read())
242 params = json.loads(ui.fin.read())
242 result = callconduit(repo, name, params)
243 result = callconduit(repo, name, params)
243 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
244 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
244 ui.write(b'%s\n' % s)
245 ui.write(b'%s\n' % s)
245
246
246 def getrepophid(repo):
247 def getrepophid(repo):
247 """given callsign, return repository PHID or None"""
248 """given callsign, return repository PHID or None"""
248 # developer config: phabricator.repophid
249 # developer config: phabricator.repophid
249 repophid = repo.ui.config(b'phabricator', b'repophid')
250 repophid = repo.ui.config(b'phabricator', b'repophid')
250 if repophid:
251 if repophid:
251 return repophid
252 return repophid
252 callsign = repo.ui.config(b'phabricator', b'callsign')
253 callsign = repo.ui.config(b'phabricator', b'callsign')
253 if not callsign:
254 if not callsign:
254 return None
255 return None
255 query = callconduit(repo, b'diffusion.repository.search',
256 query = callconduit(repo, b'diffusion.repository.search',
256 {b'constraints': {b'callsigns': [callsign]}})
257 {b'constraints': {b'callsigns': [callsign]}})
257 if len(query[b'data']) == 0:
258 if len(query[b'data']) == 0:
258 return None
259 return None
259 repophid = query[b'data'][0][b'phid']
260 repophid = query[b'data'][0][b'phid']
260 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
261 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
261 return repophid
262 return repophid
262
263
263 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
264 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
264 _differentialrevisiondescre = re.compile(
265 _differentialrevisiondescre = re.compile(
265 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
266 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
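
A quick standalone check of how the second pattern extracts the revision number from a commit message trailer (the message and URL below are illustrative):

    import re

    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
    desc = (b'phabricator: fix a thing\n\n'
            b'Differential Revision: https://phab.example.com/D1234')
    m = descre.search(desc)
    print(m.group('url'))      # b'https://phab.example.com/D1234'
    print(int(m.group('id')))  # 1234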
266
267
267 def getoldnodedrevmap(repo, nodelist):
268 def getoldnodedrevmap(repo, nodelist):
268 """find previous nodes that has been sent to Phabricator
269 """find previous nodes that has been sent to Phabricator
269
270
270 return {node: (oldnode, Differential diff, Differential Revision ID)}
271 return {node: (oldnode, Differential diff, Differential Revision ID)}
271 for node in nodelist with known previous sent versions, or associated
272 for node in nodelist with known previous sent versions, or associated
272 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
273 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
273 be ``None``.
274 be ``None``.
274
275
275 Examines commit messages like "Differential Revision:" to get the
276 Examines commit messages like "Differential Revision:" to get the
276 association information.
277 association information.
277
278
278 If such a commit message line is not found, examines all precursors and their
279 If such a commit message line is not found, examines all precursors and their
279 tags. Tags with format like "D1234" are considered a match and the node
280 tags. Tags with format like "D1234" are considered a match and the node
280 with that tag, and the number after "D" (ex. 1234) will be returned.
281 with that tag, and the number after "D" (ex. 1234) will be returned.
281
282
282 The ``old node``, if not None, is guaranteed to be the last diff of
283 The ``old node``, if not None, is guaranteed to be the last diff of
283 corresponding Differential Revision, and exist in the repo.
284 corresponding Differential Revision, and exist in the repo.
284 """
285 """
285 unfi = repo.unfiltered()
286 unfi = repo.unfiltered()
286 nodemap = unfi.changelog.nodemap
287 nodemap = unfi.changelog.nodemap
287
288
288 result = {} # {node: (oldnode?, lastdiff?, drev)}
289 result = {} # {node: (oldnode?, lastdiff?, drev)}
289 toconfirm = {} # {node: (force, {precnode}, drev)}
290 toconfirm = {} # {node: (force, {precnode}, drev)}
290 for node in nodelist:
291 for node in nodelist:
291 ctx = unfi[node]
292 ctx = unfi[node]
292 # For tags like "D123", put them into "toconfirm" to verify later
293 # For tags like "D123", put them into "toconfirm" to verify later
293 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
294 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
294 for n in precnodes:
295 for n in precnodes:
295 if n in nodemap:
296 if n in nodemap:
296 for tag in unfi.nodetags(n):
297 for tag in unfi.nodetags(n):
297 m = _differentialrevisiontagre.match(tag)
298 m = _differentialrevisiontagre.match(tag)
298 if m:
299 if m:
299 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
300 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
300 continue
301 continue
301
302
302 # Check commit message
303 # Check commit message
303 m = _differentialrevisiondescre.search(ctx.description())
304 m = _differentialrevisiondescre.search(ctx.description())
304 if m:
305 if m:
305 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
306 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
306
307
307 # Double check if tags are genuine by collecting all old nodes from
308 # Double check if tags are genuine by collecting all old nodes from
308 # Phabricator, and expect precursors overlap with it.
309 # Phabricator, and expect precursors overlap with it.
309 if toconfirm:
310 if toconfirm:
310 drevs = [drev for force, precs, drev in toconfirm.values()]
311 drevs = [drev for force, precs, drev in toconfirm.values()]
311 alldiffs = callconduit(unfi, b'differential.querydiffs',
312 alldiffs = callconduit(unfi, b'differential.querydiffs',
312 {b'revisionIDs': drevs})
313 {b'revisionIDs': drevs})
313 getnode = lambda d: bin(
314 getnode = lambda d: bin(
314 getdiffmeta(d).get(b'node', b'')) or None
315 getdiffmeta(d).get(b'node', b'')) or None
315 for newnode, (force, precset, drev) in toconfirm.items():
316 for newnode, (force, precset, drev) in toconfirm.items():
316 diffs = [d for d in alldiffs.values()
317 diffs = [d for d in alldiffs.values()
317 if int(d[b'revisionID']) == drev]
318 if int(d[b'revisionID']) == drev]
318
319
319 # "precursors" as known by Phabricator
320 # "precursors" as known by Phabricator
320 phprecset = set(getnode(d) for d in diffs)
321 phprecset = set(getnode(d) for d in diffs)
321
322
322 # Ignore if precursors (Phabricator and local repo) do not overlap,
323 # Ignore if precursors (Phabricator and local repo) do not overlap,
323 # and force is not set (when commit message says nothing)
324 # and force is not set (when commit message says nothing)
324 if not force and not bool(phprecset & precset):
325 if not force and not bool(phprecset & precset):
325 tagname = b'D%d' % drev
326 tagname = b'D%d' % drev
326 tags.tag(repo, tagname, nullid, message=None, user=None,
327 tags.tag(repo, tagname, nullid, message=None, user=None,
327 date=None, local=True)
328 date=None, local=True)
328 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
329 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
329 b'Differential history\n') % drev)
330 b'Differential history\n') % drev)
330 continue
331 continue
331
332
332 # Find the last node using Phabricator metadata, and make sure it
333 # Find the last node using Phabricator metadata, and make sure it
333 # exists in the repo
334 # exists in the repo
334 oldnode = lastdiff = None
335 oldnode = lastdiff = None
335 if diffs:
336 if diffs:
336 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
337 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
337 oldnode = getnode(lastdiff)
338 oldnode = getnode(lastdiff)
338 if oldnode and oldnode not in nodemap:
339 if oldnode and oldnode not in nodemap:
339 oldnode = None
340 oldnode = None
340
341
341 result[newnode] = (oldnode, lastdiff, drev)
342 result[newnode] = (oldnode, lastdiff, drev)
342
343
343 return result
344 return result
344
345
345 def getdiff(ctx, diffopts):
346 def getdiff(ctx, diffopts):
346 """plain-text diff without header (user, commit message, etc)"""
347 """plain-text diff without header (user, commit message, etc)"""
347 output = util.stringio()
348 output = util.stringio()
348 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
349 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
349 None, opts=diffopts):
350 None, opts=diffopts):
350 output.write(chunk)
351 output.write(chunk)
351 return output.getvalue()
352 return output.getvalue()
352
353
353 def creatediff(ctx):
354 def creatediff(ctx):
354 """create a Differential Diff"""
355 """create a Differential Diff"""
355 repo = ctx.repo()
356 repo = ctx.repo()
356 repophid = getrepophid(repo)
357 repophid = getrepophid(repo)
357 # Create a "Differential Diff" via "differential.createrawdiff" API
358 # Create a "Differential Diff" via "differential.createrawdiff" API
358 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
359 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
359 if repophid:
360 if repophid:
360 params[b'repositoryPHID'] = repophid
361 params[b'repositoryPHID'] = repophid
361 diff = callconduit(repo, b'differential.createrawdiff', params)
362 diff = callconduit(repo, b'differential.createrawdiff', params)
362 if not diff:
363 if not diff:
363 raise error.Abort(_(b'cannot create diff for %s') % ctx)
364 raise error.Abort(_(b'cannot create diff for %s') % ctx)
364 return diff
365 return diff
365
366
366 def writediffproperties(ctx, diff):
367 def writediffproperties(ctx, diff):
367 """write metadata to diff so patches could be applied losslessly"""
368 """write metadata to diff so patches could be applied losslessly"""
368 params = {
369 params = {
369 b'diff_id': diff[b'id'],
370 b'diff_id': diff[b'id'],
370 b'name': b'hg:meta',
371 b'name': b'hg:meta',
371 b'data': json.dumps({
372 b'data': json.dumps({
372 b'user': ctx.user(),
373 b'user': ctx.user(),
373 b'date': b'%d %d' % ctx.date(),
374 b'date': b'%d %d' % ctx.date(),
374 b'node': ctx.hex(),
375 b'node': ctx.hex(),
375 b'parent': ctx.p1().hex(),
376 b'parent': ctx.p1().hex(),
376 }),
377 }),
377 }
378 }
378 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
379 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
379
380
380 params = {
381 params = {
381 b'diff_id': diff[b'id'],
382 b'diff_id': diff[b'id'],
382 b'name': b'local:commits',
383 b'name': b'local:commits',
383 b'data': json.dumps({
384 b'data': json.dumps({
384 ctx.hex(): {
385 ctx.hex(): {
385 b'author': stringutil.person(ctx.user()),
386 b'author': stringutil.person(ctx.user()),
386 b'authorEmail': stringutil.email(ctx.user()),
387 b'authorEmail': stringutil.email(ctx.user()),
387 b'time': ctx.date()[0],
388 b'time': ctx.date()[0],
388 },
389 },
389 }),
390 }),
390 }
391 }
391 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
392 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
392
393
393 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
394 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
394 olddiff=None, actions=None):
395 olddiff=None, actions=None):
395 """create or update a Differential Revision
396 """create or update a Differential Revision
396
397
397 If revid is None, create a new Differential Revision, otherwise update
398 If revid is None, create a new Differential Revision, otherwise update
398 revid. If parentrevid is not None, set it as a dependency.
399 revid. If parentrevid is not None, set it as a dependency.
399
400
400 If oldnode is not None, check if the patch content (without commit message
401 If oldnode is not None, check if the patch content (without commit message
401 and metadata) has changed before creating another diff.
402 and metadata) has changed before creating another diff.
402
403
403 If actions is not None, they will be appended to the transaction.
404 If actions is not None, they will be appended to the transaction.
404 """
405 """
405 repo = ctx.repo()
406 repo = ctx.repo()
406 if oldnode:
407 if oldnode:
407 diffopts = mdiff.diffopts(git=True, context=32767)
408 diffopts = mdiff.diffopts(git=True, context=32767)
408 oldctx = repo.unfiltered()[oldnode]
409 oldctx = repo.unfiltered()[oldnode]
409 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
410 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
410 else:
411 else:
411 neednewdiff = True
412 neednewdiff = True
412
413
413 transactions = []
414 transactions = []
414 if neednewdiff:
415 if neednewdiff:
415 diff = creatediff(ctx)
416 diff = creatediff(ctx)
416 transactions.append({b'type': b'update', b'value': diff[b'phid']})
417 transactions.append({b'type': b'update', b'value': diff[b'phid']})
417 else:
418 else:
418 # Even if we don't need to upload a new diff because the patch content
419 # Even if we don't need to upload a new diff because the patch content
419 # does not change, we might still need to update its metadata so
420 # does not change, we might still need to update its metadata so
420 # pushers could know the correct node metadata.
421 # pushers could know the correct node metadata.
421 assert olddiff
422 assert olddiff
422 diff = olddiff
423 diff = olddiff
423 writediffproperties(ctx, diff)
424 writediffproperties(ctx, diff)
424
425
425 # Use a temporary summary to set dependency. There might be better ways but
426 # Use a temporary summary to set dependency. There might be better ways but
426 # I cannot find them for now. But do not do that if we are updating an
427 # I cannot find them for now. But do not do that if we are updating an
427 # existing revision (revid is not None) since that introduces visible
428 # existing revision (revid is not None) since that introduces visible
428 # churns (someone edited "Summary" twice) on the web page.
429 # churns (someone edited "Summary" twice) on the web page.
429 if parentrevid and revid is None:
430 if parentrevid and revid is None:
430 summary = b'Depends on D%s' % parentrevid
431 summary = b'Depends on D%s' % parentrevid
431 transactions += [{b'type': b'summary', b'value': summary},
432 transactions += [{b'type': b'summary', b'value': summary},
432 {b'type': b'summary', b'value': b' '}]
433 {b'type': b'summary', b'value': b' '}]
433
434
434 if actions:
435 if actions:
435 transactions += actions
436 transactions += actions
436
437
437 # Parse commit message and update related fields.
438 # Parse commit message and update related fields.
438 desc = ctx.description()
439 desc = ctx.description()
439 info = callconduit(repo, b'differential.parsecommitmessage',
440 info = callconduit(repo, b'differential.parsecommitmessage',
440 {b'corpus': desc})
441 {b'corpus': desc})
441 for k, v in info[b'fields'].items():
442 for k, v in info[b'fields'].items():
442 if k in [b'title', b'summary', b'testPlan']:
443 if k in [b'title', b'summary', b'testPlan']:
443 transactions.append({b'type': k, b'value': v})
444 transactions.append({b'type': k, b'value': v})
444
445
445 params = {b'transactions': transactions}
446 params = {b'transactions': transactions}
446 if revid is not None:
447 if revid is not None:
447 # Update an existing Differential Revision
448 # Update an existing Differential Revision
448 params[b'objectIdentifier'] = revid
449 params[b'objectIdentifier'] = revid
449
450
450 revision = callconduit(repo, b'differential.revision.edit', params)
451 revision = callconduit(repo, b'differential.revision.edit', params)
451 if not revision:
452 if not revision:
452 raise error.Abort(_(b'cannot create revision for %s') % ctx)
453 raise error.Abort(_(b'cannot create revision for %s') % ctx)
453
454
454 return revision, diff
455 return revision, diff
455
456
456 def userphids(repo, names):
457 def userphids(repo, names):
457 """convert user names to PHIDs"""
458 """convert user names to PHIDs"""
458 names = [name.lower() for name in names]
459 names = [name.lower() for name in names]
459 query = {b'constraints': {b'usernames': names}}
460 query = {b'constraints': {b'usernames': names}}
460 result = callconduit(repo, b'user.search', query)
461 result = callconduit(repo, b'user.search', query)
461 # username not found is not an error of the API. So check if we have missed
462 # username not found is not an error of the API. So check if we have missed
462 # some names here.
463 # some names here.
463 data = result[b'data']
464 data = result[b'data']
464 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
465 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
465 unresolved = set(names) - resolved
466 unresolved = set(names) - resolved
466 if unresolved:
467 if unresolved:
467 raise error.Abort(_(b'unknown username: %s')
468 raise error.Abort(_(b'unknown username: %s')
468 % b' '.join(sorted(unresolved)))
469 % b' '.join(sorted(unresolved)))
469 return [entry[b'phid'] for entry in data]
470 return [entry[b'phid'] for entry in data]
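
A small standalone sketch of the lookup and the missing-name check described above; the response data is invented and uses plain str keys:

    names = [name.lower() for name in ['Alice', 'bob']]
    query = {'constraints': {'usernames': names}}

    # Shape of a user.search result entry (values invented):
    data = [{'phid': 'PHID-USER-aaaaaaaaaaaaaaaaaaaa',
             'fields': {'username': 'alice'}}]
    resolved = set(entry['fields']['username'].lower() for entry in data)
    unresolved = set(names) - resolved
    print(sorted(unresolved))             # ['bob'], which would trigger the abort
    print([entry['phid'] for entry in data])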
470
471
471 @vcrcommand(b'phabsend',
472 @vcrcommand(b'phabsend',
472 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
473 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
473 (b'', b'amend', True, _(b'update commit messages')),
474 (b'', b'amend', True, _(b'update commit messages')),
474 (b'', b'reviewer', [], _(b'specify reviewers')),
475 (b'', b'reviewer', [], _(b'specify reviewers')),
475 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
476 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
476 _(b'REV [OPTIONS]'),
477 _(b'REV [OPTIONS]'),
477 helpcategory=command.CATEGORY_IMPORT_EXPORT)
478 helpcategory=command.CATEGORY_IMPORT_EXPORT)
478 def phabsend(ui, repo, *revs, **opts):
479 def phabsend(ui, repo, *revs, **opts):
479 """upload changesets to Phabricator
480 """upload changesets to Phabricator
480
481
481 If there are multiple revisions specified, they will be sent as a stack
482 If there are multiple revisions specified, they will be sent as a stack
482 with linear dependency relationships using the order specified by the
483 with linear dependency relationships using the order specified by the
483 revset.
484 revset.
484
485
485 For the first time uploading changesets, local tags will be created to
486 For the first time uploading changesets, local tags will be created to
486 maintain the association. After the first time, phabsend will check
487 maintain the association. After the first time, phabsend will check
487 obsstore and tags information so it can figure out whether to update an
488 obsstore and tags information so it can figure out whether to update an
488 existing Differential Revision, or create a new one.
489 existing Differential Revision, or create a new one.
489
490
490 If --amend is set, update commit messages so they have the
491 If --amend is set, update commit messages so they have the
491 ``Differential Revision`` URL, remove related tags. This is similar to what
492 ``Differential Revision`` URL, remove related tags. This is similar to what
492 arcanist will do, and is more desired in author-push workflows. Otherwise,
493 arcanist will do, and is more desired in author-push workflows. Otherwise,
493 use local tags to record the ``Differential Revision`` association.
494 use local tags to record the ``Differential Revision`` association.
494
495
495 The --confirm option lets you confirm changesets before sending them. You
496 The --confirm option lets you confirm changesets before sending them. You
496 can also add following to your configuration file to make it default
497 can also add following to your configuration file to make it default
497 behaviour::
498 behaviour::
498
499
499 [phabsend]
500 [phabsend]
500 confirm = true
501 confirm = true
501
502
502 phabsend will check obsstore and the above association to decide whether to
503 phabsend will check obsstore and the above association to decide whether to
503 update an existing Differential Revision, or create a new one.
504 update an existing Differential Revision, or create a new one.
504 """
505 """
505 revs = list(revs) + opts.get(b'rev', [])
506 revs = list(revs) + opts.get(b'rev', [])
506 revs = scmutil.revrange(repo, revs)
507 revs = scmutil.revrange(repo, revs)
507
508
508 if not revs:
509 if not revs:
509 raise error.Abort(_(b'phabsend requires at least one changeset'))
510 raise error.Abort(_(b'phabsend requires at least one changeset'))
510 if opts.get(b'amend'):
511 if opts.get(b'amend'):
511 cmdutil.checkunfinished(repo)
512 cmdutil.checkunfinished(repo)
512
513
513 # {newnode: (oldnode, olddiff, olddrev}
514 # {newnode: (oldnode, olddiff, olddrev}
514 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
515 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
515
516
516 confirm = ui.configbool(b'phabsend', b'confirm')
517 confirm = ui.configbool(b'phabsend', b'confirm')
517 confirm |= bool(opts.get(b'confirm'))
518 confirm |= bool(opts.get(b'confirm'))
518 if confirm:
519 if confirm:
519 confirmed = _confirmbeforesend(repo, revs, oldmap)
520 confirmed = _confirmbeforesend(repo, revs, oldmap)
520 if not confirmed:
521 if not confirmed:
521 raise error.Abort(_(b'phabsend cancelled'))
522 raise error.Abort(_(b'phabsend cancelled'))
522
523
523 actions = []
524 actions = []
524 reviewers = opts.get(b'reviewer', [])
525 reviewers = opts.get(b'reviewer', [])
525 if reviewers:
526 if reviewers:
526 phids = userphids(repo, reviewers)
527 phids = userphids(repo, reviewers)
527 actions.append({b'type': b'reviewers.add', b'value': phids})
528 actions.append({b'type': b'reviewers.add', b'value': phids})
528
529
529 drevids = [] # [int]
530 drevids = [] # [int]
530 diffmap = {} # {newnode: diff}
531 diffmap = {} # {newnode: diff}
531
532
532 # Send patches one by one so we know their Differential Revision IDs and
533 # Send patches one by one so we know their Differential Revision IDs and
533 # can provide dependency relationship
534 # can provide dependency relationship
534 lastrevid = None
535 lastrevid = None
535 for rev in revs:
536 for rev in revs:
536 ui.debug(b'sending rev %d\n' % rev)
537 ui.debug(b'sending rev %d\n' % rev)
537 ctx = repo[rev]
538 ctx = repo[rev]
538
539
539 # Get Differential Revision ID
540 # Get Differential Revision ID
540 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
541 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
541 if oldnode != ctx.node() or opts.get(b'amend'):
542 if oldnode != ctx.node() or opts.get(b'amend'):
542 # Create or update Differential Revision
543 # Create or update Differential Revision
543 revision, diff = createdifferentialrevision(
544 revision, diff = createdifferentialrevision(
544 ctx, revid, lastrevid, oldnode, olddiff, actions)
545 ctx, revid, lastrevid, oldnode, olddiff, actions)
545 diffmap[ctx.node()] = diff
546 diffmap[ctx.node()] = diff
546 newrevid = int(revision[b'object'][b'id'])
547 newrevid = int(revision[b'object'][b'id'])
547 if revid:
548 if revid:
548 action = b'updated'
549 action = b'updated'
549 else:
550 else:
550 action = b'created'
551 action = b'created'
551
552
552 # Create a local tag to note the association, if commit message
553 # Create a local tag to note the association, if commit message
553 # does not have it already
554 # does not have it already
554 m = _differentialrevisiondescre.search(ctx.description())
555 m = _differentialrevisiondescre.search(ctx.description())
555 if not m or int(m.group(b'id')) != newrevid:
556 if not m or int(m.group(b'id')) != newrevid:
556 tagname = b'D%d' % newrevid
557 tagname = b'D%d' % newrevid
557 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
558 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
558 date=None, local=True)
559 date=None, local=True)
559 else:
560 else:
560 # Nothing changed. But still set "newrevid" so the next revision
561 # Nothing changed. But still set "newrevid" so the next revision
561 # could depend on this one.
562 # could depend on this one.
562 newrevid = revid
563 newrevid = revid
563 action = b'skipped'
564 action = b'skipped'
564
565
565 actiondesc = ui.label(
566 actiondesc = ui.label(
566 {b'created': _(b'created'),
567 {b'created': _(b'created'),
567 b'skipped': _(b'skipped'),
568 b'skipped': _(b'skipped'),
568 b'updated': _(b'updated')}[action],
569 b'updated': _(b'updated')}[action],
569 b'phabricator.action.%s' % action)
570 b'phabricator.action.%s' % action)
570 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
571 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
571 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
572 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
572 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
573 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
573 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
574 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
574 desc))
575 desc))
575 drevids.append(newrevid)
576 drevids.append(newrevid)
576 lastrevid = newrevid
577 lastrevid = newrevid
577
578
578 # Update commit messages and remove tags
579 # Update commit messages and remove tags
579 if opts.get(b'amend'):
580 if opts.get(b'amend'):
580 unfi = repo.unfiltered()
581 unfi = repo.unfiltered()
581 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
582 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
582 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
583 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
583 wnode = unfi[b'.'].node()
584 wnode = unfi[b'.'].node()
584 mapping = {} # {oldnode: [newnode]}
585 mapping = {} # {oldnode: [newnode]}
585 for i, rev in enumerate(revs):
586 for i, rev in enumerate(revs):
586 old = unfi[rev]
587 old = unfi[rev]
587 drevid = drevids[i]
588 drevid = drevids[i]
588 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
589 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
589 newdesc = getdescfromdrev(drev)
590 newdesc = getdescfromdrev(drev)
590 # Make sure the commit message contains "Differential Revision"
591 # Make sure the commit message contains "Differential Revision"
591 if old.description() != newdesc:
592 if old.description() != newdesc:
592 if old.phase() == phases.public:
593 if old.phase() == phases.public:
593 ui.warn(_("warning: not updating public commit %s\n")
594 ui.warn(_("warning: not updating public commit %s\n")
594 % scmutil.formatchangeid(old))
595 % scmutil.formatchangeid(old))
595 continue
596 continue
596 parents = [
597 parents = [
597 mapping.get(old.p1().node(), (old.p1(),))[0],
598 mapping.get(old.p1().node(), (old.p1(),))[0],
598 mapping.get(old.p2().node(), (old.p2(),))[0],
599 mapping.get(old.p2().node(), (old.p2(),))[0],
599 ]
600 ]
600 new = context.metadataonlyctx(
601 new = context.metadataonlyctx(
601 repo, old, parents=parents, text=newdesc,
602 repo, old, parents=parents, text=newdesc,
602 user=old.user(), date=old.date(), extra=old.extra())
603 user=old.user(), date=old.date(), extra=old.extra())
603
604
604 newnode = new.commit()
605 newnode = new.commit()
605
606
606 mapping[old.node()] = [newnode]
607 mapping[old.node()] = [newnode]
607 # Update diff property
608 # Update diff property
608 writediffproperties(unfi[newnode], diffmap[old.node()])
609 writediffproperties(unfi[newnode], diffmap[old.node()])
609 # Remove local tags since it's no longer necessary
610 # Remove local tags since it's no longer necessary
610 tagname = b'D%d' % drevid
611 tagname = b'D%d' % drevid
611 if tagname in repo.tags():
612 if tagname in repo.tags():
612 tags.tag(repo, tagname, nullid, message=None, user=None,
613 tags.tag(repo, tagname, nullid, message=None, user=None,
613 date=None, local=True)
614 date=None, local=True)
614 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
615 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
615 if wnode in mapping:
616 if wnode in mapping:
616 unfi.setparents(mapping[wnode][0])
617 unfi.setparents(mapping[wnode][0])
617
618
618 # Map from "hg:meta" keys to header understood by "hg import". The order is
619 # Map from "hg:meta" keys to header understood by "hg import". The order is
619 # consistent with "hg export" output.
620 # consistent with "hg export" output.
620 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
621 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
621 (b'node', b'Node ID'), (b'parent', b'Parent ')])
622 (b'node', b'Node ID'), (b'parent', b'Parent ')])
622
623
623 def _confirmbeforesend(repo, revs, oldmap):
624 def _confirmbeforesend(repo, revs, oldmap):
624 url, token = readurltoken(repo)
625 url, token = readurltoken(repo)
625 ui = repo.ui
626 ui = repo.ui
626 for rev in revs:
627 for rev in revs:
627 ctx = repo[rev]
628 ctx = repo[rev]
628 desc = ctx.description().splitlines()[0]
629 desc = ctx.description().splitlines()[0]
629 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
630 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
630 if drevid:
631 if drevid:
631 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
632 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
632 else:
633 else:
633 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
634 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
634
635
635 ui.write(_(b'%s - %s: %s\n')
636 ui.write(_(b'%s - %s: %s\n')
636 % (drevdesc,
637 % (drevdesc,
637 ui.label(bytes(ctx), b'phabricator.node'),
638 ui.label(bytes(ctx), b'phabricator.node'),
638 ui.label(desc, b'phabricator.desc')))
639 ui.label(desc, b'phabricator.desc')))
639
640
640 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
641 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
641 b'$$ &Yes $$ &No') % url):
642 b'$$ &Yes $$ &No') % url):
642 return False
643 return False
643
644
644 return True
645 return True
645
646
646 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
647 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
647 b'abandoned'}
648 b'abandoned'}
648
649
649 def _getstatusname(drev):
650 def _getstatusname(drev):
650 """get normalized status name from a Differential Revision"""
651 """get normalized status name from a Differential Revision"""
651 return drev[b'statusName'].replace(b' ', b'').lower()
652 return drev[b'statusName'].replace(b' ', b'').lower()
652
653
653 # Small language to specify differential revisions. Support symbols: (), :X,
654 # Small language to specify differential revisions. Support symbols: (), :X,
654 # +, and -.
655 # +, and -.
655
656
656 _elements = {
657 _elements = {
657 # token-type: binding-strength, primary, prefix, infix, suffix
658 # token-type: binding-strength, primary, prefix, infix, suffix
658 b'(': (12, None, (b'group', 1, b')'), None, None),
659 b'(': (12, None, (b'group', 1, b')'), None, None),
659 b':': (8, None, (b'ancestors', 8), None, None),
660 b':': (8, None, (b'ancestors', 8), None, None),
660 b'&': (5, None, None, (b'and_', 5), None),
661 b'&': (5, None, None, (b'and_', 5), None),
661 b'+': (4, None, None, (b'add', 4), None),
662 b'+': (4, None, None, (b'add', 4), None),
662 b'-': (4, None, None, (b'sub', 4), None),
663 b'-': (4, None, None, (b'sub', 4), None),
663 b')': (0, None, None, None, None),
664 b')': (0, None, None, None, None),
664 b'symbol': (0, b'symbol', None, None, None),
665 b'symbol': (0, b'symbol', None, None, None),
665 b'end': (0, None, None, None, None),
666 b'end': (0, None, None, None, None),
666 }
667 }
667
668
668 def _tokenize(text):
669 def _tokenize(text):
669 view = memoryview(text) # zero-copy slice
670 view = memoryview(text) # zero-copy slice
670 special = b'():+-& '
671 special = b'():+-& '
671 pos = 0
672 pos = 0
672 length = len(text)
673 length = len(text)
673 while pos < length:
674 while pos < length:
674 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
675 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
675 view[pos:]))
676 view[pos:]))
676 if symbol:
677 if symbol:
677 yield (b'symbol', symbol, pos)
678 yield (b'symbol', symbol, pos)
678 pos += len(symbol)
679 pos += len(symbol)
679 else: # special char, ignore space
680 else: # special char, ignore space
680 if text[pos] != b' ':
681 if text[pos] != b' ':
681 yield (text[pos], None, pos)
682 yield (text[pos], None, pos)
682 pos += 1
683 pos += 1
683 yield (b'end', None, pos)
684 yield (b'end', None, pos)
684
685
685 def _parse(text):
686 def _parse(text):
686 tree, pos = parser.parser(_elements).parse(_tokenize(text))
687 tree, pos = parser.parser(_elements).parse(_tokenize(text))
687 if pos != len(text):
688 if pos != len(text):
688 raise error.ParseError(b'invalid token', pos)
689 raise error.ParseError(b'invalid token', pos)
689 return tree
690 return tree
690
691
691 def _parsedrev(symbol):
692 def _parsedrev(symbol):
692 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
693 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
693 if symbol.startswith(b'D') and symbol[1:].isdigit():
694 if symbol.startswith(b'D') and symbol[1:].isdigit():
694 return int(symbol[1:])
695 return int(symbol[1:])
695 if symbol.isdigit():
696 if symbol.isdigit():
696 return int(symbol)
697 return int(symbol)
697
698
698 def _prefetchdrevs(tree):
699 def _prefetchdrevs(tree):
699 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
700 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
700 drevs = set()
701 drevs = set()
701 ancestordrevs = set()
702 ancestordrevs = set()
702 op = tree[0]
703 op = tree[0]
703 if op == b'symbol':
704 if op == b'symbol':
704 r = _parsedrev(tree[1])
705 r = _parsedrev(tree[1])
705 if r:
706 if r:
706 drevs.add(r)
707 drevs.add(r)
707 elif op == b'ancestors':
708 elif op == b'ancestors':
708 r, a = _prefetchdrevs(tree[1])
709 r, a = _prefetchdrevs(tree[1])
709 drevs.update(r)
710 drevs.update(r)
710 ancestordrevs.update(r)
711 ancestordrevs.update(r)
711 ancestordrevs.update(a)
712 ancestordrevs.update(a)
712 else:
713 else:
713 for t in tree[1:]:
714 for t in tree[1:]:
714 r, a = _prefetchdrevs(t)
715 r, a = _prefetchdrevs(t)
715 drevs.update(r)
716 drevs.update(r)
716 ancestordrevs.update(a)
717 ancestordrevs.update(a)
717 return drevs, ancestordrevs
718 return drevs, ancestordrevs
718
719
719 def querydrev(repo, spec):
720 def querydrev(repo, spec):
720 """return a list of "Differential Revision" dicts
721 """return a list of "Differential Revision" dicts
721
722
722 spec is a string using a simple query language, see docstring in phabread
723 spec is a string using a simple query language, see docstring in phabread
723 for details.
724 for details.
724
725
725 A "Differential Revision dict" looks like:
726 A "Differential Revision dict" looks like:
726
727
727 {
728 {
728 "id": "2",
729 "id": "2",
729 "phid": "PHID-DREV-672qvysjcczopag46qty",
730 "phid": "PHID-DREV-672qvysjcczopag46qty",
730 "title": "example",
731 "title": "example",
731 "uri": "https://phab.example.com/D2",
732 "uri": "https://phab.example.com/D2",
732 "dateCreated": "1499181406",
733 "dateCreated": "1499181406",
733 "dateModified": "1499182103",
734 "dateModified": "1499182103",
734 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
735 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
735 "status": "0",
736 "status": "0",
736 "statusName": "Needs Review",
737 "statusName": "Needs Review",
737 "properties": [],
738 "properties": [],
738 "branch": null,
739 "branch": null,
739 "summary": "",
740 "summary": "",
740 "testPlan": "",
741 "testPlan": "",
741 "lineCount": "2",
742 "lineCount": "2",
742 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
743 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
743 "diffs": [
744 "diffs": [
744 "3",
745 "3",
745 "4",
746 "4",
746 ],
747 ],
747 "commits": [],
748 "commits": [],
748 "reviewers": [],
749 "reviewers": [],
749 "ccs": [],
750 "ccs": [],
750 "hashes": [],
751 "hashes": [],
751 "auxiliary": {
752 "auxiliary": {
752 "phabricator:projects": [],
753 "phabricator:projects": [],
753 "phabricator:depends-on": [
754 "phabricator:depends-on": [
754 "PHID-DREV-gbapp366kutjebt7agcd"
755 "PHID-DREV-gbapp366kutjebt7agcd"
755 ]
756 ]
756 },
757 },
757 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
758 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
758 "sourcePath": null
759 "sourcePath": null
759 }
760 }
760 """
761 """
761 def fetch(params):
762 def fetch(params):
762 """params -> single drev or None"""
763 """params -> single drev or None"""
763 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
764 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
764 if key in prefetched:
765 if key in prefetched:
765 return prefetched[key]
766 return prefetched[key]
766 drevs = callconduit(repo, b'differential.query', params)
767 drevs = callconduit(repo, b'differential.query', params)
767 # Fill prefetched with the result
768 # Fill prefetched with the result
768 for drev in drevs:
769 for drev in drevs:
769 prefetched[drev[b'phid']] = drev
770 prefetched[drev[b'phid']] = drev
770 prefetched[int(drev[b'id'])] = drev
771 prefetched[int(drev[b'id'])] = drev
771 if key not in prefetched:
772 if key not in prefetched:
772 raise error.Abort(_(b'cannot get Differential Revision %r')
773 raise error.Abort(_(b'cannot get Differential Revision %r')
773 % params)
774 % params)
774 return prefetched[key]
775 return prefetched[key]
775
776
776 def getstack(topdrevids):
777 def getstack(topdrevids):
777 """given a top, get a stack from the bottom, [id] -> [id]"""
778 """given a top, get a stack from the bottom, [id] -> [id]"""
778 visited = set()
779 visited = set()
779 result = []
780 result = []
780 queue = [{r'ids': [i]} for i in topdrevids]
781 queue = [{r'ids': [i]} for i in topdrevids]
781 while queue:
782 while queue:
782 params = queue.pop()
783 params = queue.pop()
783 drev = fetch(params)
784 drev = fetch(params)
784 if drev[b'id'] in visited:
785 if drev[b'id'] in visited:
785 continue
786 continue
786 visited.add(drev[b'id'])
787 visited.add(drev[b'id'])
787 result.append(int(drev[b'id']))
788 result.append(int(drev[b'id']))
788 auxiliary = drev.get(b'auxiliary', {})
789 auxiliary = drev.get(b'auxiliary', {})
789 depends = auxiliary.get(b'phabricator:depends-on', [])
790 depends = auxiliary.get(b'phabricator:depends-on', [])
790 for phid in depends:
791 for phid in depends:
791 queue.append({b'phids': [phid]})
792 queue.append({b'phids': [phid]})
792 result.reverse()
793 result.reverse()
793 return smartset.baseset(result)
794 return smartset.baseset(result)
794
795
795 # Initialize prefetch cache
796 # Initialize prefetch cache
796 prefetched = {} # {id or phid: drev}
797 prefetched = {} # {id or phid: drev}
797
798
798 tree = _parse(spec)
799 tree = _parse(spec)
799 drevs, ancestordrevs = _prefetchdrevs(tree)
800 drevs, ancestordrevs = _prefetchdrevs(tree)
800
801
801 # developer config: phabricator.batchsize
802 # developer config: phabricator.batchsize
802 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
803 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
803
804
804 # Prefetch Differential Revisions in batch
805 # Prefetch Differential Revisions in batch
805 tofetch = set(drevs)
806 tofetch = set(drevs)
806 for r in ancestordrevs:
807 for r in ancestordrevs:
807 tofetch.update(range(max(1, r - batchsize), r + 1))
808 tofetch.update(range(max(1, r - batchsize), r + 1))
808 if drevs:
809 if drevs:
809 fetch({b'ids': list(tofetch)})
810 fetch({b'ids': list(tofetch)})
810 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
811 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
811
812
812 # Walk through the tree, return smartsets
813 # Walk through the tree, return smartsets
813 def walk(tree):
814 def walk(tree):
814 op = tree[0]
815 op = tree[0]
815 if op == b'symbol':
816 if op == b'symbol':
816 drev = _parsedrev(tree[1])
817 drev = _parsedrev(tree[1])
817 if drev:
818 if drev:
818 return smartset.baseset([drev])
819 return smartset.baseset([drev])
819 elif tree[1] in _knownstatusnames:
820 elif tree[1] in _knownstatusnames:
820 drevs = [r for r in validids
821 drevs = [r for r in validids
821 if _getstatusname(prefetched[r]) == tree[1]]
822 if _getstatusname(prefetched[r]) == tree[1]]
822 return smartset.baseset(drevs)
823 return smartset.baseset(drevs)
823 else:
824 else:
824 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
825 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
825 elif op in {b'and_', b'add', b'sub'}:
826 elif op in {b'and_', b'add', b'sub'}:
826 assert len(tree) == 3
827 assert len(tree) == 3
827 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
828 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
828 elif op == b'group':
829 elif op == b'group':
829 return walk(tree[1])
830 return walk(tree[1])
830 elif op == b'ancestors':
831 elif op == b'ancestors':
831 return getstack(walk(tree[1]))
832 return getstack(walk(tree[1]))
832 else:
833 else:
833 raise error.ProgrammingError(b'illegal tree: %r' % tree)
834 raise error.ProgrammingError(b'illegal tree: %r' % tree)
834
835
835 return [prefetched[r] for r in walk(tree)]
836 return [prefetched[r] for r in walk(tree)]
836
837
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

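# For example, given a drev with a title, summary, and test plan, the
# assembled message looks like this (the content is illustrative, the URL
# follows the config example in the module docstring):
#
#   add frobnicator support
#
#   Some longer summary text.
#
#   Test Plan:
#   ran the new tests
#
#   Differential Revision: https://phab.example.com/D123
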
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {
            b'date': b'%d 0' % commit[b'time'],
            b'node': commit[b'rev'],
            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
        }
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}

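# Quick sketch of the mapping performed by getdiffmeta(), using the values
# from its docstring example (node hashes shortened for readability):
#
#   diff = {b'properties': {b'hg:meta': {b'date': b'1499571514 25200',
#                                        b'node': b'98c08ac...',
#                                        b'user': b'Foo Bar <foo@example.com>',
#                                        b'parent': b'6d0abad...'}}}
#   getdiffmeta(diff)
#   # -> the b'hg:meta' dict itself, which readpatch() below turns into
#   #    "# User ..." / "# Date ..." style patch headers via _metanamemap
#   #    (defined earlier in this file).
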
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, i.e. results
    of "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        # The querydiffs result is keyed by bytes, like the other conduit
        # lookups in this file, so build a bytes key.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

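# The emitted patch starts with headers along these lines (names come from
# _metanamemap; values reuse the getdiffmeta() docstring example):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   <commit message from getdescfromdrev()>
#   <raw diff from differential.getrawdiff>
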
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used
    to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only filter revisions already selected by the non-status parts of the
    query, so they cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    # **opts has native str keys on Python 3; normalize to bytes before use
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

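# Typical command-line usage (the revision numbers are placeholders):
#
#   $ hg phabread D123 > D123.patch           # a single revision
#   $ hg phabread --stack D123 | hg import -  # D123 plus everything it
#                                             # depends on, oldest first
#
# The output is a plain-text patch, so piping it into 'hg import -' as above
# is the usual way to apply it.
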
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
            ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    # **opts has native str keys on Python 3; normalize to bytes before use
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

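# Typical command-line usage (revision numbers are placeholders):
#
#   $ hg phabupdate --accept :D150 -m 'LGTM'  # accept the stack up to D150,
#                                             # leaving the comment on D150
#   $ hg phabupdate --abandon D100            # abandon a single revision
#
# Only one of --accept/--reject/--abandon/--reclaim may be given per run, and
# the comment, if any, is attached to the last selected revision.
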
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        # group names must be native strings, and bytes have no .format()
        # on Python 3, so use % formatting for the id
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b'D%s' % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
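
# One way to query the keyword from templates, assuming the template dot
# operator for member access described in the docstring above:
#
#   $ hg log -r . -T '{phabreview.url}\n'
#
# which prints the Differential Revision URL recorded in the changeset
# description (or via a local tag), if any.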