phabricator: make user searches case-insensitive...
Julien Cristau
r41854:570e62f1 default
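This change makes the reviewer lookup in userphids() case-insensitive: the names passed to the command and the usernames returned by Phabricator's user.search endpoint are both lowercased before they are compared, so the unknown-username check no longer fails just because the capitalization differs from the Phabricator account. A minimal sketch of that comparison logic, outside the extension (the check_resolved helper and the sample search result below are illustrative, not part of the commit):

    # Illustrative sketch only -- mirrors the comparison userphids() now does.
    # The helper name and sample data are made up; the real code also collects
    # the matching PHIDs and raises error.Abort on unresolved names.
    def check_resolved(requested, searchdata):
        requested = [name.lower() for name in requested]
        resolved = set(entry['fields']['username'].lower() for entry in searchdata)
        return set(requested) - resolved

    # 'Alice' now matches an account whose username is reported as 'alice'.
    assert check_resolved(['Alice'], [{'fields': {'username': 'alice'}}]) == set()
    assert check_resolved(['bob'], []) == {'bob'}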
@@ -1,1012 +1,1013 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 registrar,
63 registrar,
64 scmutil,
64 scmutil,
65 smartset,
65 smartset,
66 tags,
66 tags,
67 templateutil,
67 templateutil,
68 url as urlmod,
68 url as urlmod,
69 util,
69 util,
70 )
70 )
71 from mercurial.utils import (
71 from mercurial.utils import (
72 procutil,
72 procutil,
73 stringutil,
73 stringutil,
74 )
74 )
75
75
76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 # be specifying the version(s) of Mercurial they are tested with, or
78 # be specifying the version(s) of Mercurial they are tested with, or
79 # leave the attribute unspecified.
79 # leave the attribute unspecified.
80 testedwith = 'ships-with-hg-core'
80 testedwith = 'ships-with-hg-core'
81
81
82 cmdtable = {}
82 cmdtable = {}
83 command = registrar.command(cmdtable)
83 command = registrar.command(cmdtable)
84
84
85 configtable = {}
85 configtable = {}
86 configitem = registrar.configitem(configtable)
86 configitem = registrar.configitem(configtable)
87
87
88 # developer config: phabricator.batchsize
88 # developer config: phabricator.batchsize
89 configitem(b'phabricator', b'batchsize',
89 configitem(b'phabricator', b'batchsize',
90 default=12,
90 default=12,
91 )
91 )
92 configitem(b'phabricator', b'callsign',
92 configitem(b'phabricator', b'callsign',
93 default=None,
93 default=None,
94 )
94 )
95 configitem(b'phabricator', b'curlcmd',
95 configitem(b'phabricator', b'curlcmd',
96 default=None,
96 default=None,
97 )
97 )
98 # developer config: phabricator.repophid
98 # developer config: phabricator.repophid
99 configitem(b'phabricator', b'repophid',
99 configitem(b'phabricator', b'repophid',
100 default=None,
100 default=None,
101 )
101 )
102 configitem(b'phabricator', b'url',
102 configitem(b'phabricator', b'url',
103 default=None,
103 default=None,
104 )
104 )
105 configitem(b'phabsend', b'confirm',
105 configitem(b'phabsend', b'confirm',
106 default=False,
106 default=False,
107 )
107 )
108
108
109 colortable = {
109 colortable = {
110 b'phabricator.action.created': b'green',
110 b'phabricator.action.created': b'green',
111 b'phabricator.action.skipped': b'magenta',
111 b'phabricator.action.skipped': b'magenta',
112 b'phabricator.action.updated': b'magenta',
112 b'phabricator.action.updated': b'magenta',
113 b'phabricator.desc': b'',
113 b'phabricator.desc': b'',
114 b'phabricator.drev': b'bold',
114 b'phabricator.drev': b'bold',
115 b'phabricator.node': b'',
115 b'phabricator.node': b'',
116 }
116 }
117
117
118 _VCR_FLAGS = [
118 _VCR_FLAGS = [
119 (b'', b'test-vcr', b'',
119 (b'', b'test-vcr', b'',
120 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
120 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
121 b', otherwise will mock all http requests using the specified vcr file.'
121 b', otherwise will mock all http requests using the specified vcr file.'
122 b' (ADVANCED)'
122 b' (ADVANCED)'
123 )),
123 )),
124 ]
124 ]
125
125
126 def vcrcommand(name, flags, spec, helpcategory=None):
126 def vcrcommand(name, flags, spec, helpcategory=None):
127 fullflags = flags + _VCR_FLAGS
127 fullflags = flags + _VCR_FLAGS
128 def decorate(fn):
128 def decorate(fn):
129 def inner(*args, **kwargs):
129 def inner(*args, **kwargs):
130 cassette = kwargs.pop(r'test_vcr', None)
130 cassette = kwargs.pop(r'test_vcr', None)
131 if cassette:
131 if cassette:
132 import hgdemandimport
132 import hgdemandimport
133 with hgdemandimport.deactivated():
133 with hgdemandimport.deactivated():
134 import vcr as vcrmod
134 import vcr as vcrmod
135 import vcr.stubs as stubs
135 import vcr.stubs as stubs
136 vcr = vcrmod.VCR(
136 vcr = vcrmod.VCR(
137 serializer=r'json',
137 serializer=r'json',
138 custom_patches=[
138 custom_patches=[
139 (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
139 (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
140 (urlmod, 'httpsconnection',
140 (urlmod, 'httpsconnection',
141 stubs.VCRHTTPSConnection),
141 stubs.VCRHTTPSConnection),
142 ])
142 ])
143 with vcr.use_cassette(cassette):
143 with vcr.use_cassette(cassette):
144 return fn(*args, **kwargs)
144 return fn(*args, **kwargs)
145 return fn(*args, **kwargs)
145 return fn(*args, **kwargs)
146 inner.__name__ = fn.__name__
146 inner.__name__ = fn.__name__
147 inner.__doc__ = fn.__doc__
147 inner.__doc__ = fn.__doc__
148 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
148 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
149 return decorate
149 return decorate
150
150
151 def urlencodenested(params):
151 def urlencodenested(params):
152 """like urlencode, but works with nested parameters.
152 """like urlencode, but works with nested parameters.
153
153
154 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
154 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
155 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
155 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
156 urlencode. Note: the encoding is consistent with PHP's http_build_query.
156 urlencode. Note: the encoding is consistent with PHP's http_build_query.
157 """
157 """
158 flatparams = util.sortdict()
158 flatparams = util.sortdict()
159 def process(prefix, obj):
159 def process(prefix, obj):
160 if isinstance(obj, bool):
160 if isinstance(obj, bool):
161 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
161 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
162 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
162 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
163 if items is None:
163 if items is None:
164 flatparams[prefix] = obj
164 flatparams[prefix] = obj
165 else:
165 else:
166 for k, v in items(obj):
166 for k, v in items(obj):
167 if prefix:
167 if prefix:
168 process(b'%s[%s]' % (prefix, k), v)
168 process(b'%s[%s]' % (prefix, k), v)
169 else:
169 else:
170 process(k, v)
170 process(k, v)
171 process(b'', params)
171 process(b'', params)
172 return util.urlreq.urlencode(flatparams)
172 return util.urlreq.urlencode(flatparams)
173
173
174 def readurltoken(repo):
174 def readurltoken(repo):
175 """return conduit url, token and make sure they exist
175 """return conduit url, token and make sure they exist
176
176
177 Currently read from [auth] config section. In the future, it might
177 Currently read from [auth] config section. In the future, it might
178 make sense to read from .arcconfig and .arcrc as well.
178 make sense to read from .arcconfig and .arcrc as well.
179 """
179 """
180 url = repo.ui.config(b'phabricator', b'url')
180 url = repo.ui.config(b'phabricator', b'url')
181 if not url:
181 if not url:
182 raise error.Abort(_(b'config %s.%s is required')
182 raise error.Abort(_(b'config %s.%s is required')
183 % (b'phabricator', b'url'))
183 % (b'phabricator', b'url'))
184
184
185 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
185 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
186 token = None
186 token = None
187
187
188 if res:
188 if res:
189 group, auth = res
189 group, auth = res
190
190
191 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
191 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
192
192
193 token = auth.get(b'phabtoken')
193 token = auth.get(b'phabtoken')
194
194
195 if not token:
195 if not token:
196 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
196 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
197 % (url,))
197 % (url,))
198
198
199 return url, token
199 return url, token
200
200
201 def callconduit(repo, name, params):
201 def callconduit(repo, name, params):
202 """call Conduit API, params is a dict. return json.loads result, or None"""
202 """call Conduit API, params is a dict. return json.loads result, or None"""
203 host, token = readurltoken(repo)
203 host, token = readurltoken(repo)
204 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
204 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
205 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
205 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
206 params = params.copy()
206 params = params.copy()
207 params[b'api.token'] = token
207 params[b'api.token'] = token
208 data = urlencodenested(params)
208 data = urlencodenested(params)
209 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
209 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
210 if curlcmd:
210 if curlcmd:
211 sin, sout = procutil.popen2(b'%s -d @- %s'
211 sin, sout = procutil.popen2(b'%s -d @- %s'
212 % (curlcmd, procutil.shellquote(url)))
212 % (curlcmd, procutil.shellquote(url)))
213 sin.write(data)
213 sin.write(data)
214 sin.close()
214 sin.close()
215 body = sout.read()
215 body = sout.read()
216 else:
216 else:
217 urlopener = urlmod.opener(repo.ui, authinfo)
217 urlopener = urlmod.opener(repo.ui, authinfo)
218 request = util.urlreq.request(url, data=data)
218 request = util.urlreq.request(url, data=data)
219 with contextlib.closing(urlopener.open(request)) as rsp:
219 with contextlib.closing(urlopener.open(request)) as rsp:
220 body = rsp.read()
220 body = rsp.read()
221 repo.ui.debug(b'Conduit Response: %s\n' % body)
221 repo.ui.debug(b'Conduit Response: %s\n' % body)
222 parsed = json.loads(body)
222 parsed = json.loads(body)
223 if parsed.get(r'error_code'):
223 if parsed.get(r'error_code'):
224 msg = (_(b'Conduit Error (%s): %s')
224 msg = (_(b'Conduit Error (%s): %s')
225 % (parsed[r'error_code'], parsed[r'error_info']))
225 % (parsed[r'error_code'], parsed[r'error_info']))
226 raise error.Abort(msg)
226 raise error.Abort(msg)
227 return parsed[r'result']
227 return parsed[r'result']
228
228
229 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
229 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
230 def debugcallconduit(ui, repo, name):
230 def debugcallconduit(ui, repo, name):
231 """call Conduit API
231 """call Conduit API
232
232
233 Call parameters are read from stdin as a JSON blob. Result will be written
233 Call parameters are read from stdin as a JSON blob. Result will be written
234 to stdout as a JSON blob.
234 to stdout as a JSON blob.
235 """
235 """
236 params = json.loads(ui.fin.read())
236 params = json.loads(ui.fin.read())
237 result = callconduit(repo, name, params)
237 result = callconduit(repo, name, params)
238 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
238 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
239 ui.write(b'%s\n' % s)
239 ui.write(b'%s\n' % s)
240
240
241 def getrepophid(repo):
241 def getrepophid(repo):
242 """given callsign, return repository PHID or None"""
242 """given callsign, return repository PHID or None"""
243 # developer config: phabricator.repophid
243 # developer config: phabricator.repophid
244 repophid = repo.ui.config(b'phabricator', b'repophid')
244 repophid = repo.ui.config(b'phabricator', b'repophid')
245 if repophid:
245 if repophid:
246 return repophid
246 return repophid
247 callsign = repo.ui.config(b'phabricator', b'callsign')
247 callsign = repo.ui.config(b'phabricator', b'callsign')
248 if not callsign:
248 if not callsign:
249 return None
249 return None
250 query = callconduit(repo, b'diffusion.repository.search',
250 query = callconduit(repo, b'diffusion.repository.search',
251 {b'constraints': {b'callsigns': [callsign]}})
251 {b'constraints': {b'callsigns': [callsign]}})
252 if len(query[r'data']) == 0:
252 if len(query[r'data']) == 0:
253 return None
253 return None
254 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
254 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
255 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
255 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
256 return repophid
256 return repophid
257
257
258 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
258 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
259 _differentialrevisiondescre = re.compile(
259 _differentialrevisiondescre = re.compile(
260 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
260 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
261
261
262 def getoldnodedrevmap(repo, nodelist):
262 def getoldnodedrevmap(repo, nodelist):
263 """find previous nodes that has been sent to Phabricator
263 """find previous nodes that has been sent to Phabricator
264
264
265 return {node: (oldnode, Differential diff, Differential Revision ID)}
265 return {node: (oldnode, Differential diff, Differential Revision ID)}
266 for node in nodelist with known previous sent versions, or associated
266 for node in nodelist with known previous sent versions, or associated
267 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
267 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
268 be ``None``.
268 be ``None``.
269
269
270 Examines commit messages like "Differential Revision:" to get the
270 Examines commit messages like "Differential Revision:" to get the
271 association information.
271 association information.
272
272
273 If such commit message line is not found, examines all precursors and their
273 If such commit message line is not found, examines all precursors and their
274 tags. Tags with format like "D1234" are considered a match and the node
274 tags. Tags with format like "D1234" are considered a match and the node
275 with that tag, and the number after "D" (ex. 1234) will be returned.
275 with that tag, and the number after "D" (ex. 1234) will be returned.
276
276
277 The ``old node``, if not None, is guaranteed to be the last diff of
277 The ``old node``, if not None, is guaranteed to be the last diff of
278 corresponding Differential Revision, and exist in the repo.
278 corresponding Differential Revision, and exist in the repo.
279 """
279 """
280 unfi = repo.unfiltered()
280 unfi = repo.unfiltered()
281 nodemap = unfi.changelog.nodemap
281 nodemap = unfi.changelog.nodemap
282
282
283 result = {} # {node: (oldnode?, lastdiff?, drev)}
283 result = {} # {node: (oldnode?, lastdiff?, drev)}
284 toconfirm = {} # {node: (force, {precnode}, drev)}
284 toconfirm = {} # {node: (force, {precnode}, drev)}
285 for node in nodelist:
285 for node in nodelist:
286 ctx = unfi[node]
286 ctx = unfi[node]
287 # For tags like "D123", put them into "toconfirm" to verify later
287 # For tags like "D123", put them into "toconfirm" to verify later
288 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
288 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
289 for n in precnodes:
289 for n in precnodes:
290 if n in nodemap:
290 if n in nodemap:
291 for tag in unfi.nodetags(n):
291 for tag in unfi.nodetags(n):
292 m = _differentialrevisiontagre.match(tag)
292 m = _differentialrevisiontagre.match(tag)
293 if m:
293 if m:
294 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
294 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
295 continue
295 continue
296
296
297 # Check commit message
297 # Check commit message
298 m = _differentialrevisiondescre.search(ctx.description())
298 m = _differentialrevisiondescre.search(ctx.description())
299 if m:
299 if m:
300 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
300 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
301
301
302 # Double check if tags are genuine by collecting all old nodes from
302 # Double check if tags are genuine by collecting all old nodes from
303 # Phabricator, and expect precursors overlap with it.
303 # Phabricator, and expect precursors overlap with it.
304 if toconfirm:
304 if toconfirm:
305 drevs = [drev for force, precs, drev in toconfirm.values()]
305 drevs = [drev for force, precs, drev in toconfirm.values()]
306 alldiffs = callconduit(unfi, b'differential.querydiffs',
306 alldiffs = callconduit(unfi, b'differential.querydiffs',
307 {b'revisionIDs': drevs})
307 {b'revisionIDs': drevs})
308 getnode = lambda d: bin(encoding.unitolocal(
308 getnode = lambda d: bin(encoding.unitolocal(
309 getdiffmeta(d).get(r'node', b''))) or None
309 getdiffmeta(d).get(r'node', b''))) or None
310 for newnode, (force, precset, drev) in toconfirm.items():
310 for newnode, (force, precset, drev) in toconfirm.items():
311 diffs = [d for d in alldiffs.values()
311 diffs = [d for d in alldiffs.values()
312 if int(d[r'revisionID']) == drev]
312 if int(d[r'revisionID']) == drev]
313
313
314 # "precursors" as known by Phabricator
314 # "precursors" as known by Phabricator
315 phprecset = set(getnode(d) for d in diffs)
315 phprecset = set(getnode(d) for d in diffs)
316
316
317 # Ignore if precursors (Phabricator and local repo) do not overlap,
317 # Ignore if precursors (Phabricator and local repo) do not overlap,
318 # and force is not set (when commit message says nothing)
318 # and force is not set (when commit message says nothing)
319 if not force and not bool(phprecset & precset):
319 if not force and not bool(phprecset & precset):
320 tagname = b'D%d' % drev
320 tagname = b'D%d' % drev
321 tags.tag(repo, tagname, nullid, message=None, user=None,
321 tags.tag(repo, tagname, nullid, message=None, user=None,
322 date=None, local=True)
322 date=None, local=True)
323 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
323 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
324 b'Differential history\n') % drev)
324 b'Differential history\n') % drev)
325 continue
325 continue
326
326
327 # Find the last node using Phabricator metadata, and make sure it
327 # Find the last node using Phabricator metadata, and make sure it
328 # exists in the repo
328 # exists in the repo
329 oldnode = lastdiff = None
329 oldnode = lastdiff = None
330 if diffs:
330 if diffs:
331 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
331 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
332 oldnode = getnode(lastdiff)
332 oldnode = getnode(lastdiff)
333 if oldnode and oldnode not in nodemap:
333 if oldnode and oldnode not in nodemap:
334 oldnode = None
334 oldnode = None
335
335
336 result[newnode] = (oldnode, lastdiff, drev)
336 result[newnode] = (oldnode, lastdiff, drev)
337
337
338 return result
338 return result
339
339
340 def getdiff(ctx, diffopts):
340 def getdiff(ctx, diffopts):
341 """plain-text diff without header (user, commit message, etc)"""
341 """plain-text diff without header (user, commit message, etc)"""
342 output = util.stringio()
342 output = util.stringio()
343 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
343 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
344 None, opts=diffopts):
344 None, opts=diffopts):
345 output.write(chunk)
345 output.write(chunk)
346 return output.getvalue()
346 return output.getvalue()
347
347
348 def creatediff(ctx):
348 def creatediff(ctx):
349 """create a Differential Diff"""
349 """create a Differential Diff"""
350 repo = ctx.repo()
350 repo = ctx.repo()
351 repophid = getrepophid(repo)
351 repophid = getrepophid(repo)
352 # Create a "Differential Diff" via "differential.createrawdiff" API
352 # Create a "Differential Diff" via "differential.createrawdiff" API
353 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
353 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
354 if repophid:
354 if repophid:
355 params[b'repositoryPHID'] = repophid
355 params[b'repositoryPHID'] = repophid
356 diff = callconduit(repo, b'differential.createrawdiff', params)
356 diff = callconduit(repo, b'differential.createrawdiff', params)
357 if not diff:
357 if not diff:
358 raise error.Abort(_(b'cannot create diff for %s') % ctx)
358 raise error.Abort(_(b'cannot create diff for %s') % ctx)
359 return diff
359 return diff
360
360
361 def writediffproperties(ctx, diff):
361 def writediffproperties(ctx, diff):
362 """write metadata to diff so patches could be applied losslessly"""
362 """write metadata to diff so patches could be applied losslessly"""
363 params = {
363 params = {
364 b'diff_id': diff[r'id'],
364 b'diff_id': diff[r'id'],
365 b'name': b'hg:meta',
365 b'name': b'hg:meta',
366 b'data': json.dumps({
366 b'data': json.dumps({
367 b'user': ctx.user(),
367 b'user': ctx.user(),
368 b'date': b'%d %d' % ctx.date(),
368 b'date': b'%d %d' % ctx.date(),
369 b'node': ctx.hex(),
369 b'node': ctx.hex(),
370 b'parent': ctx.p1().hex(),
370 b'parent': ctx.p1().hex(),
371 }),
371 }),
372 }
372 }
373 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
373 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
374
374
375 params = {
375 params = {
376 b'diff_id': diff[r'id'],
376 b'diff_id': diff[r'id'],
377 b'name': b'local:commits',
377 b'name': b'local:commits',
378 b'data': json.dumps({
378 b'data': json.dumps({
379 ctx.hex(): {
379 ctx.hex(): {
380 b'author': stringutil.person(ctx.user()),
380 b'author': stringutil.person(ctx.user()),
381 b'authorEmail': stringutil.email(ctx.user()),
381 b'authorEmail': stringutil.email(ctx.user()),
382 b'time': ctx.date()[0],
382 b'time': ctx.date()[0],
383 },
383 },
384 }),
384 }),
385 }
385 }
386 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
386 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
387
387
388 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
388 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
389 olddiff=None, actions=None):
389 olddiff=None, actions=None):
390 """create or update a Differential Revision
390 """create or update a Differential Revision
391
391
392 If revid is None, create a new Differential Revision, otherwise update
392 If revid is None, create a new Differential Revision, otherwise update
393 revid. If parentrevid is not None, set it as a dependency.
393 revid. If parentrevid is not None, set it as a dependency.
394
394
395 If oldnode is not None, check if the patch content (without commit message
395 If oldnode is not None, check if the patch content (without commit message
396 and metadata) has changed before creating another diff.
396 and metadata) has changed before creating another diff.
397
397
398 If actions is not None, they will be appended to the transaction.
398 If actions is not None, they will be appended to the transaction.
399 """
399 """
400 repo = ctx.repo()
400 repo = ctx.repo()
401 if oldnode:
401 if oldnode:
402 diffopts = mdiff.diffopts(git=True, context=32767)
402 diffopts = mdiff.diffopts(git=True, context=32767)
403 oldctx = repo.unfiltered()[oldnode]
403 oldctx = repo.unfiltered()[oldnode]
404 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
404 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
405 else:
405 else:
406 neednewdiff = True
406 neednewdiff = True
407
407
408 transactions = []
408 transactions = []
409 if neednewdiff:
409 if neednewdiff:
410 diff = creatediff(ctx)
410 diff = creatediff(ctx)
411 transactions.append({b'type': b'update', b'value': diff[r'phid']})
411 transactions.append({b'type': b'update', b'value': diff[r'phid']})
412 else:
412 else:
413 # Even if we don't need to upload a new diff because the patch content
413 # Even if we don't need to upload a new diff because the patch content
414 # does not change. We might still need to update its metadata so
414 # does not change. We might still need to update its metadata so
415 # pushers could know the correct node metadata.
415 # pushers could know the correct node metadata.
416 assert olddiff
416 assert olddiff
417 diff = olddiff
417 diff = olddiff
418 writediffproperties(ctx, diff)
418 writediffproperties(ctx, diff)
419
419
420 # Use a temporary summary to set dependency. There might be better ways but
420 # Use a temporary summary to set dependency. There might be better ways but
421 # I cannot find them for now. But do not do that if we are updating an
421 # I cannot find them for now. But do not do that if we are updating an
422 # existing revision (revid is not None) since that introduces visible
422 # existing revision (revid is not None) since that introduces visible
423 # churns (someone edited "Summary" twice) on the web page.
423 # churns (someone edited "Summary" twice) on the web page.
424 if parentrevid and revid is None:
424 if parentrevid and revid is None:
425 summary = b'Depends on D%s' % parentrevid
425 summary = b'Depends on D%s' % parentrevid
426 transactions += [{b'type': b'summary', b'value': summary},
426 transactions += [{b'type': b'summary', b'value': summary},
427 {b'type': b'summary', b'value': b' '}]
427 {b'type': b'summary', b'value': b' '}]
428
428
429 if actions:
429 if actions:
430 transactions += actions
430 transactions += actions
431
431
432 # Parse commit message and update related fields.
432 # Parse commit message and update related fields.
433 desc = ctx.description()
433 desc = ctx.description()
434 info = callconduit(repo, b'differential.parsecommitmessage',
434 info = callconduit(repo, b'differential.parsecommitmessage',
435 {b'corpus': desc})
435 {b'corpus': desc})
436 for k, v in info[r'fields'].items():
436 for k, v in info[r'fields'].items():
437 if k in [b'title', b'summary', b'testPlan']:
437 if k in [b'title', b'summary', b'testPlan']:
438 transactions.append({b'type': k, b'value': v})
438 transactions.append({b'type': k, b'value': v})
439
439
440 params = {b'transactions': transactions}
440 params = {b'transactions': transactions}
441 if revid is not None:
441 if revid is not None:
442 # Update an existing Differential Revision
442 # Update an existing Differential Revision
443 params[b'objectIdentifier'] = revid
443 params[b'objectIdentifier'] = revid
444
444
445 revision = callconduit(repo, b'differential.revision.edit', params)
445 revision = callconduit(repo, b'differential.revision.edit', params)
446 if not revision:
446 if not revision:
447 raise error.Abort(_(b'cannot create revision for %s') % ctx)
447 raise error.Abort(_(b'cannot create revision for %s') % ctx)
448
448
449 return revision, diff
449 return revision, diff
450
450
451 def userphids(repo, names):
451 def userphids(repo, names):
452 """convert user names to PHIDs"""
452 """convert user names to PHIDs"""
453 + names = [name.lower() for name in names]
453 query = {b'constraints': {b'usernames': names}}
454 query = {b'constraints': {b'usernames': names}}
454 result = callconduit(repo, b'user.search', query)
455 result = callconduit(repo, b'user.search', query)
455 # username not found is not an error of the API. So check if we have missed
456 # username not found is not an error of the API. So check if we have missed
456 # some names here.
457 # some names here.
457 data = result[r'data']
458 data = result[r'data']
458 - resolved = set(entry[r'fields'][r'username'] for entry in data)
459 + resolved = set(entry[r'fields'][r'username'].lower() for entry in data)
459 unresolved = set(names) - resolved
460 unresolved = set(names) - resolved
460 if unresolved:
461 if unresolved:
461 raise error.Abort(_(b'unknown username: %s')
462 raise error.Abort(_(b'unknown username: %s')
462 % b' '.join(sorted(unresolved)))
463 % b' '.join(sorted(unresolved)))
463 return [entry[r'phid'] for entry in data]
464 return [entry[r'phid'] for entry in data]
464
465
465 @vcrcommand(b'phabsend',
466 @vcrcommand(b'phabsend',
466 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
467 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
467 (b'', b'amend', True, _(b'update commit messages')),
468 (b'', b'amend', True, _(b'update commit messages')),
468 (b'', b'reviewer', [], _(b'specify reviewers')),
469 (b'', b'reviewer', [], _(b'specify reviewers')),
469 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
470 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
470 _(b'REV [OPTIONS]'),
471 _(b'REV [OPTIONS]'),
471 helpcategory=command.CATEGORY_IMPORT_EXPORT)
472 helpcategory=command.CATEGORY_IMPORT_EXPORT)
472 def phabsend(ui, repo, *revs, **opts):
473 def phabsend(ui, repo, *revs, **opts):
473 """upload changesets to Phabricator
474 """upload changesets to Phabricator
474
475
475 If there are multiple revisions specified, they will be send as a stack
476 If there are multiple revisions specified, they will be send as a stack
476 with a linear dependencies relationship using the order specified by the
477 with a linear dependencies relationship using the order specified by the
477 revset.
478 revset.
478
479
479 For the first time uploading changesets, local tags will be created to
480 For the first time uploading changesets, local tags will be created to
480 maintain the association. After the first time, phabsend will check
481 maintain the association. After the first time, phabsend will check
481 obsstore and tags information so it can figure out whether to update an
482 obsstore and tags information so it can figure out whether to update an
482 existing Differential Revision, or create a new one.
483 existing Differential Revision, or create a new one.
483
484
484 If --amend is set, update commit messages so they have the
485 If --amend is set, update commit messages so they have the
485 ``Differential Revision`` URL, remove related tags. This is similar to what
486 ``Differential Revision`` URL, remove related tags. This is similar to what
486 arcanist will do, and is more desired in author-push workflows. Otherwise,
487 arcanist will do, and is more desired in author-push workflows. Otherwise,
487 use local tags to record the ``Differential Revision`` association.
488 use local tags to record the ``Differential Revision`` association.
488
489
489 The --confirm option lets you confirm changesets before sending them. You
490 The --confirm option lets you confirm changesets before sending them. You
490 can also add following to your configuration file to make it default
491 can also add following to your configuration file to make it default
491 behaviour::
492 behaviour::
492
493
493 [phabsend]
494 [phabsend]
494 confirm = true
495 confirm = true
495
496
496 phabsend will check obsstore and the above association to decide whether to
497 phabsend will check obsstore and the above association to decide whether to
497 update an existing Differential Revision, or create a new one.
498 update an existing Differential Revision, or create a new one.
498 """
499 """
499 revs = list(revs) + opts.get(b'rev', [])
500 revs = list(revs) + opts.get(b'rev', [])
500 revs = scmutil.revrange(repo, revs)
501 revs = scmutil.revrange(repo, revs)
501
502
502 if not revs:
503 if not revs:
503 raise error.Abort(_(b'phabsend requires at least one changeset'))
504 raise error.Abort(_(b'phabsend requires at least one changeset'))
504 if opts.get(b'amend'):
505 if opts.get(b'amend'):
505 cmdutil.checkunfinished(repo)
506 cmdutil.checkunfinished(repo)
506
507
507 # {newnode: (oldnode, olddiff, olddrev}
508 # {newnode: (oldnode, olddiff, olddrev}
508 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
509 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
509
510
510 confirm = ui.configbool(b'phabsend', b'confirm')
511 confirm = ui.configbool(b'phabsend', b'confirm')
511 confirm |= bool(opts.get(b'confirm'))
512 confirm |= bool(opts.get(b'confirm'))
512 if confirm:
513 if confirm:
513 confirmed = _confirmbeforesend(repo, revs, oldmap)
514 confirmed = _confirmbeforesend(repo, revs, oldmap)
514 if not confirmed:
515 if not confirmed:
515 raise error.Abort(_(b'phabsend cancelled'))
516 raise error.Abort(_(b'phabsend cancelled'))
516
517
517 actions = []
518 actions = []
518 reviewers = opts.get(b'reviewer', [])
519 reviewers = opts.get(b'reviewer', [])
519 if reviewers:
520 if reviewers:
520 phids = userphids(repo, reviewers)
521 phids = userphids(repo, reviewers)
521 actions.append({b'type': b'reviewers.add', b'value': phids})
522 actions.append({b'type': b'reviewers.add', b'value': phids})
522
523
523 drevids = [] # [int]
524 drevids = [] # [int]
524 diffmap = {} # {newnode: diff}
525 diffmap = {} # {newnode: diff}
525
526
526 # Send patches one by one so we know their Differential Revision IDs and
527 # Send patches one by one so we know their Differential Revision IDs and
527 # can provide dependency relationship
528 # can provide dependency relationship
528 lastrevid = None
529 lastrevid = None
529 for rev in revs:
530 for rev in revs:
530 ui.debug(b'sending rev %d\n' % rev)
531 ui.debug(b'sending rev %d\n' % rev)
531 ctx = repo[rev]
532 ctx = repo[rev]
532
533
533 # Get Differential Revision ID
534 # Get Differential Revision ID
534 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
535 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
535 if oldnode != ctx.node() or opts.get(b'amend'):
536 if oldnode != ctx.node() or opts.get(b'amend'):
536 # Create or update Differential Revision
537 # Create or update Differential Revision
537 revision, diff = createdifferentialrevision(
538 revision, diff = createdifferentialrevision(
538 ctx, revid, lastrevid, oldnode, olddiff, actions)
539 ctx, revid, lastrevid, oldnode, olddiff, actions)
539 diffmap[ctx.node()] = diff
540 diffmap[ctx.node()] = diff
540 newrevid = int(revision[r'object'][r'id'])
541 newrevid = int(revision[r'object'][r'id'])
541 if revid:
542 if revid:
542 action = b'updated'
543 action = b'updated'
543 else:
544 else:
544 action = b'created'
545 action = b'created'
545
546
546 # Create a local tag to note the association, if commit message
547 # Create a local tag to note the association, if commit message
547 # does not have it already
548 # does not have it already
548 m = _differentialrevisiondescre.search(ctx.description())
549 m = _differentialrevisiondescre.search(ctx.description())
549 if not m or int(m.group(b'id')) != newrevid:
550 if not m or int(m.group(b'id')) != newrevid:
550 tagname = b'D%d' % newrevid
551 tagname = b'D%d' % newrevid
551 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
552 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
552 date=None, local=True)
553 date=None, local=True)
553 else:
554 else:
554 # Nothing changed. But still set "newrevid" so the next revision
555 # Nothing changed. But still set "newrevid" so the next revision
555 # could depend on this one.
556 # could depend on this one.
556 newrevid = revid
557 newrevid = revid
557 action = b'skipped'
558 action = b'skipped'
558
559
559 actiondesc = ui.label(
560 actiondesc = ui.label(
560 {b'created': _(b'created'),
561 {b'created': _(b'created'),
561 b'skipped': _(b'skipped'),
562 b'skipped': _(b'skipped'),
562 b'updated': _(b'updated')}[action],
563 b'updated': _(b'updated')}[action],
563 b'phabricator.action.%s' % action)
564 b'phabricator.action.%s' % action)
564 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
565 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
565 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
566 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
566 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
567 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
567 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
568 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
568 desc))
569 desc))
569 drevids.append(newrevid)
570 drevids.append(newrevid)
570 lastrevid = newrevid
571 lastrevid = newrevid
571
572
572 # Update commit messages and remove tags
573 # Update commit messages and remove tags
573 if opts.get(b'amend'):
574 if opts.get(b'amend'):
574 unfi = repo.unfiltered()
575 unfi = repo.unfiltered()
575 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
576 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
576 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
577 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
577 wnode = unfi[b'.'].node()
578 wnode = unfi[b'.'].node()
578 mapping = {} # {oldnode: [newnode]}
579 mapping = {} # {oldnode: [newnode]}
579 for i, rev in enumerate(revs):
580 for i, rev in enumerate(revs):
580 old = unfi[rev]
581 old = unfi[rev]
581 drevid = drevids[i]
582 drevid = drevids[i]
582 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
583 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
583 newdesc = getdescfromdrev(drev)
584 newdesc = getdescfromdrev(drev)
584 newdesc = encoding.unitolocal(newdesc)
585 newdesc = encoding.unitolocal(newdesc)
585 # Make sure commit message contain "Differential Revision"
586 # Make sure commit message contain "Differential Revision"
586 if old.description() != newdesc:
587 if old.description() != newdesc:
587 if old.phase() == phases.public:
588 if old.phase() == phases.public:
588 ui.warn(_("warning: not updating public commit %s\n")
589 ui.warn(_("warning: not updating public commit %s\n")
589 % scmutil.formatchangeid(old))
590 % scmutil.formatchangeid(old))
590 continue
591 continue
591 parents = [
592 parents = [
592 mapping.get(old.p1().node(), (old.p1(),))[0],
593 mapping.get(old.p1().node(), (old.p1(),))[0],
593 mapping.get(old.p2().node(), (old.p2(),))[0],
594 mapping.get(old.p2().node(), (old.p2(),))[0],
594 ]
595 ]
595 new = context.metadataonlyctx(
596 new = context.metadataonlyctx(
596 repo, old, parents=parents, text=newdesc,
597 repo, old, parents=parents, text=newdesc,
597 user=old.user(), date=old.date(), extra=old.extra())
598 user=old.user(), date=old.date(), extra=old.extra())
598
599
599 newnode = new.commit()
600 newnode = new.commit()
600
601
601 mapping[old.node()] = [newnode]
602 mapping[old.node()] = [newnode]
602 # Update diff property
603 # Update diff property
603 writediffproperties(unfi[newnode], diffmap[old.node()])
604 writediffproperties(unfi[newnode], diffmap[old.node()])
604 # Remove local tags since it's no longer necessary
605 # Remove local tags since it's no longer necessary
605 tagname = b'D%d' % drevid
606 tagname = b'D%d' % drevid
606 if tagname in repo.tags():
607 if tagname in repo.tags():
607 tags.tag(repo, tagname, nullid, message=None, user=None,
608 tags.tag(repo, tagname, nullid, message=None, user=None,
608 date=None, local=True)
609 date=None, local=True)
609 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
610 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
610 if wnode in mapping:
611 if wnode in mapping:
611 unfi.setparents(mapping[wnode][0])
612 unfi.setparents(mapping[wnode][0])
612
613
613 # Map from "hg:meta" keys to header understood by "hg import". The order is
614 # Map from "hg:meta" keys to header understood by "hg import". The order is
614 # consistent with "hg export" output.
615 # consistent with "hg export" output.
615 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
616 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
616 (r'node', b'Node ID'), (r'parent', b'Parent ')])
617 (r'node', b'Node ID'), (r'parent', b'Parent ')])
617
618
618 def _confirmbeforesend(repo, revs, oldmap):
619 def _confirmbeforesend(repo, revs, oldmap):
619 url, token = readurltoken(repo)
620 url, token = readurltoken(repo)
620 ui = repo.ui
621 ui = repo.ui
621 for rev in revs:
622 for rev in revs:
622 ctx = repo[rev]
623 ctx = repo[rev]
623 desc = ctx.description().splitlines()[0]
624 desc = ctx.description().splitlines()[0]
624 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
625 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
625 if drevid:
626 if drevid:
626 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
627 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
627 else:
628 else:
628 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
629 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
629
630
630 ui.write(_(b'%s - %s: %s\n')
631 ui.write(_(b'%s - %s: %s\n')
631 % (drevdesc,
632 % (drevdesc,
632 ui.label(bytes(ctx), b'phabricator.node'),
633 ui.label(bytes(ctx), b'phabricator.node'),
633 ui.label(desc, b'phabricator.desc')))
634 ui.label(desc, b'phabricator.desc')))
634
635
635 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
636 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
636 b'$$ &Yes $$ &No') % url):
637 b'$$ &Yes $$ &No') % url):
637 return False
638 return False
638
639
639 return True
640 return True
640
641
641 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
642 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
642 b'abandoned'}
643 b'abandoned'}
643
644
644 def _getstatusname(drev):
645 def _getstatusname(drev):
645 """get normalized status name from a Differential Revision"""
646 """get normalized status name from a Differential Revision"""
646 return drev[r'statusName'].replace(b' ', b'').lower()
647 return drev[r'statusName'].replace(b' ', b'').lower()
647
648
648 # Small language to specify differential revisions. Support symbols: (), :X,
649 # Small language to specify differential revisions. Support symbols: (), :X,
649 # +, and -.
650 # +, and -.
650
651
651 _elements = {
652 _elements = {
652 # token-type: binding-strength, primary, prefix, infix, suffix
653 # token-type: binding-strength, primary, prefix, infix, suffix
653 b'(': (12, None, (b'group', 1, b')'), None, None),
654 b'(': (12, None, (b'group', 1, b')'), None, None),
654 b':': (8, None, (b'ancestors', 8), None, None),
655 b':': (8, None, (b'ancestors', 8), None, None),
655 b'&': (5, None, None, (b'and_', 5), None),
656 b'&': (5, None, None, (b'and_', 5), None),
656 b'+': (4, None, None, (b'add', 4), None),
657 b'+': (4, None, None, (b'add', 4), None),
657 b'-': (4, None, None, (b'sub', 4), None),
658 b'-': (4, None, None, (b'sub', 4), None),
658 b')': (0, None, None, None, None),
659 b')': (0, None, None, None, None),
659 b'symbol': (0, b'symbol', None, None, None),
660 b'symbol': (0, b'symbol', None, None, None),
660 b'end': (0, None, None, None, None),
661 b'end': (0, None, None, None, None),
661 }
662 }
662
663
663 def _tokenize(text):
664 def _tokenize(text):
664 view = memoryview(text) # zero-copy slice
665 view = memoryview(text) # zero-copy slice
665 special = b'():+-& '
666 special = b'():+-& '
666 pos = 0
667 pos = 0
667 length = len(text)
668 length = len(text)
668 while pos < length:
669 while pos < length:
669 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
670 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
670 view[pos:]))
671 view[pos:]))
671 if symbol:
672 if symbol:
672 yield (b'symbol', symbol, pos)
673 yield (b'symbol', symbol, pos)
673 pos += len(symbol)
674 pos += len(symbol)
674 else: # special char, ignore space
675 else: # special char, ignore space
675 if text[pos] != b' ':
676 if text[pos] != b' ':
676 yield (text[pos], None, pos)
677 yield (text[pos], None, pos)
677 pos += 1
678 pos += 1
678 yield (b'end', None, pos)
679 yield (b'end', None, pos)
679
680
680 def _parse(text):
681 def _parse(text):
681 tree, pos = parser.parser(_elements).parse(_tokenize(text))
682 tree, pos = parser.parser(_elements).parse(_tokenize(text))
682 if pos != len(text):
683 if pos != len(text):
683 raise error.ParseError(b'invalid token', pos)
684 raise error.ParseError(b'invalid token', pos)
684 return tree
685 return tree
685
686
686 def _parsedrev(symbol):
687 def _parsedrev(symbol):
687 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
688 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
688 if symbol.startswith(b'D') and symbol[1:].isdigit():
689 if symbol.startswith(b'D') and symbol[1:].isdigit():
689 return int(symbol[1:])
690 return int(symbol[1:])
690 if symbol.isdigit():
691 if symbol.isdigit():
691 return int(symbol)
692 return int(symbol)
692
693
693 def _prefetchdrevs(tree):
694 def _prefetchdrevs(tree):
694 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
695 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
695 drevs = set()
696 drevs = set()
696 ancestordrevs = set()
697 ancestordrevs = set()
697 op = tree[0]
698 op = tree[0]
698 if op == b'symbol':
699 if op == b'symbol':
699 r = _parsedrev(tree[1])
700 r = _parsedrev(tree[1])
700 if r:
701 if r:
701 drevs.add(r)
702 drevs.add(r)
702 elif op == b'ancestors':
703 elif op == b'ancestors':
703 r, a = _prefetchdrevs(tree[1])
704 r, a = _prefetchdrevs(tree[1])
704 drevs.update(r)
705 drevs.update(r)
705 ancestordrevs.update(r)
706 ancestordrevs.update(r)
706 ancestordrevs.update(a)
707 ancestordrevs.update(a)
707 else:
708 else:
708 for t in tree[1:]:
709 for t in tree[1:]:
709 r, a = _prefetchdrevs(t)
710 r, a = _prefetchdrevs(t)
710 drevs.update(r)
711 drevs.update(r)
711 ancestordrevs.update(a)
712 ancestordrevs.update(a)
712 return drevs, ancestordrevs
713 return drevs, ancestordrevs
713
714
714 def querydrev(repo, spec):
715 def querydrev(repo, spec):
715 """return a list of "Differential Revision" dicts
716 """return a list of "Differential Revision" dicts
716
717
717 spec is a string using a simple query language, see docstring in phabread
718 spec is a string using a simple query language, see docstring in phabread
718 for details.
719 for details.
719
720
720 A "Differential Revision dict" looks like:
721 A "Differential Revision dict" looks like:
721
722
722 {
723 {
723 "id": "2",
724 "id": "2",
724 "phid": "PHID-DREV-672qvysjcczopag46qty",
725 "phid": "PHID-DREV-672qvysjcczopag46qty",
725 "title": "example",
726 "title": "example",
726 "uri": "https://phab.example.com/D2",
727 "uri": "https://phab.example.com/D2",
727 "dateCreated": "1499181406",
728 "dateCreated": "1499181406",
728 "dateModified": "1499182103",
729 "dateModified": "1499182103",
729 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
730 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
730 "status": "0",
731 "status": "0",
731 "statusName": "Needs Review",
732 "statusName": "Needs Review",
732 "properties": [],
733 "properties": [],
733 "branch": null,
734 "branch": null,
734 "summary": "",
735 "summary": "",
735 "testPlan": "",
736 "testPlan": "",
736 "lineCount": "2",
737 "lineCount": "2",
737 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
738 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
738 "diffs": [
739 "diffs": [
739 "3",
740 "3",
740 "4",
741 "4",
741 ],
742 ],
742 "commits": [],
743 "commits": [],
743 "reviewers": [],
744 "reviewers": [],
744 "ccs": [],
745 "ccs": [],
745 "hashes": [],
746 "hashes": [],
746 "auxiliary": {
747 "auxiliary": {
747 "phabricator:projects": [],
748 "phabricator:projects": [],
748 "phabricator:depends-on": [
749 "phabricator:depends-on": [
749 "PHID-DREV-gbapp366kutjebt7agcd"
750 "PHID-DREV-gbapp366kutjebt7agcd"
750 ]
751 ]
751 },
752 },
752 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
753 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
753 "sourcePath": null
754 "sourcePath": null
754 }
755 }
755 """
756 """
756 def fetch(params):
757 def fetch(params):
757 """params -> single drev or None"""
758 """params -> single drev or None"""
758 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
759 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
759 if key in prefetched:
760 if key in prefetched:
760 return prefetched[key]
761 return prefetched[key]
761 drevs = callconduit(repo, b'differential.query', params)
762 drevs = callconduit(repo, b'differential.query', params)
762 # Fill prefetched with the result
763 # Fill prefetched with the result
763 for drev in drevs:
764 for drev in drevs:
764 prefetched[drev[r'phid']] = drev
765 prefetched[drev[r'phid']] = drev
765 prefetched[int(drev[r'id'])] = drev
766 prefetched[int(drev[r'id'])] = drev
766 if key not in prefetched:
767 if key not in prefetched:
767 raise error.Abort(_(b'cannot get Differential Revision %r')
768 raise error.Abort(_(b'cannot get Differential Revision %r')
768 % params)
769 % params)
769 return prefetched[key]
770 return prefetched[key]
770
771
771 def getstack(topdrevids):
772 def getstack(topdrevids):
772 """given a top, get a stack from the bottom, [id] -> [id]"""
773 """given a top, get a stack from the bottom, [id] -> [id]"""
773 visited = set()
774 visited = set()
774 result = []
775 result = []
775 queue = [{r'ids': [i]} for i in topdrevids]
776 queue = [{r'ids': [i]} for i in topdrevids]
776 while queue:
777 while queue:
777 params = queue.pop()
778 params = queue.pop()
778 drev = fetch(params)
779 drev = fetch(params)
779 if drev[r'id'] in visited:
780 if drev[r'id'] in visited:
780 continue
781 continue
781 visited.add(drev[r'id'])
782 visited.add(drev[r'id'])
782 result.append(int(drev[r'id']))
783 result.append(int(drev[r'id']))
783 auxiliary = drev.get(r'auxiliary', {})
784 auxiliary = drev.get(r'auxiliary', {})
784 depends = auxiliary.get(r'phabricator:depends-on', [])
785 depends = auxiliary.get(r'phabricator:depends-on', [])
785 for phid in depends:
786 for phid in depends:
786 queue.append({b'phids': [phid]})
787 queue.append({b'phids': [phid]})
787 result.reverse()
788 result.reverse()
788 return smartset.baseset(result)
789 return smartset.baseset(result)
789
790
790 # Initialize prefetch cache
791 # Initialize prefetch cache
791 prefetched = {} # {id or phid: drev}
792 prefetched = {} # {id or phid: drev}
792
793
793 tree = _parse(spec)
794 tree = _parse(spec)
794 drevs, ancestordrevs = _prefetchdrevs(tree)
795 drevs, ancestordrevs = _prefetchdrevs(tree)
795
796
796 # developer config: phabricator.batchsize
797 # developer config: phabricator.batchsize
797 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
798 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
798
799
799 # Prefetch Differential Revisions in batch
800 # Prefetch Differential Revisions in batch
800 tofetch = set(drevs)
801 tofetch = set(drevs)
801 for r in ancestordrevs:
802 for r in ancestordrevs:
802 tofetch.update(range(max(1, r - batchsize), r + 1))
803 tofetch.update(range(max(1, r - batchsize), r + 1))
803 if drevs:
804 if drevs:
804 fetch({r'ids': list(tofetch)})
805 fetch({r'ids': list(tofetch)})
805 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
806 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
806
807
807 # Walk through the tree, return smartsets
808 # Walk through the tree, return smartsets
808 def walk(tree):
809 def walk(tree):
809 op = tree[0]
810 op = tree[0]
810 if op == b'symbol':
811 if op == b'symbol':
811 drev = _parsedrev(tree[1])
812 drev = _parsedrev(tree[1])
812 if drev:
813 if drev:
813 return smartset.baseset([drev])
814 return smartset.baseset([drev])
814 elif tree[1] in _knownstatusnames:
815 elif tree[1] in _knownstatusnames:
815 drevs = [r for r in validids
816 drevs = [r for r in validids
816 if _getstatusname(prefetched[r]) == tree[1]]
817 if _getstatusname(prefetched[r]) == tree[1]]
817 return smartset.baseset(drevs)
818 return smartset.baseset(drevs)
818 else:
819 else:
819 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
820 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
820 elif op in {b'and_', b'add', b'sub'}:
821 elif op in {b'and_', b'add', b'sub'}:
821 assert len(tree) == 3
822 assert len(tree) == 3
822 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
823 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
823 elif op == b'group':
824 elif op == b'group':
824 return walk(tree[1])
825 return walk(tree[1])
825 elif op == b'ancestors':
826 elif op == b'ancestors':
826 return getstack(walk(tree[1]))
827 return getstack(walk(tree[1]))
827 else:
828 else:
828 raise error.ProgrammingError(b'illegal tree: %r' % tree)
829 raise error.ProgrammingError(b'illegal tree: %r' % tree)
829
830
830 return [prefetched[r] for r in walk(tree)]
831 return [prefetched[r] for r in walk(tree)]
831
832
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    title = drev[r'title']
    summary = drev[r'summary'].rstrip()
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[r'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

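# Illustrative example (values simplified; real drevs come back from
# differential.query as JSON and are converted with encoding.unitolocal()
# when written out in readpatch() below). Given a hypothetical drev such as
#
#   {'title': 'fix frobnicator',
#    'summary': 'Rework the widget.\n',
#    'testPlan': '',
#    'uri': 'https://phab.example.com/D123'}
#
# getdescfromdrev() would produce
#
#   fix frobnicator
#
#   Rework the widget.
#
#   Differential Revision: https://phab.example.com/D123
#
# with the empty test plan dropped by filter(None, ...) instead of leaving a
# blank "Test Plan:" block.
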
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    if not meta and props.get(r'local:commits'):
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}

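# Illustrative example: for the "local:commits" payload shown in the docstring
# above, getdiffmeta() would return roughly
#
#   {'date': '1499546314 0',
#    'node': '98c08acae292b2faf60a279b4189beb6cff1414d',
#    'user': 'Foo Bar <foo@example.com>',
#    'parent': '6d0abad76b30e4724a37ab8721d630394070fe16'}
#
# i.e. the timestamp keeps its value but the UTC offset is forced to 0, which
# is the time zone loss mentioned in the docstring.
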
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[r'id'])

        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))

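# Illustrative example (assuming _metanamemap, defined earlier in this file,
# maps metadata keys such as 'user', 'date', 'node' and 'parent' to the usual
# hg patch header names): for the hg:meta example in getdiffmeta()'s
# docstring, the generated patch would start roughly with
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#
# followed by the commit message from getdescfromdrev() and the raw diff from
# differential.getrawdiff, which is the layout 'hg import' understands.
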
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC can be a Differential Revision identifier, like ``D123``, or just
    the number ``123``. It can also contain common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` selects a
    stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    can be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

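# Usage sketch (hypothetical revision numbers, shown for illustration only):
#
#   hg phabread D123            # print the patch for D123
#   hg phabread --stack D123    # also print everything D123 depends on
#   hg phabread ':D6+8-(2+D4)'  # the stack up to D6, plus D8, minus D2/D4
#
# The output is plain text that can be piped straight into 'hg import -'.
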
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
            ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[r'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

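# Illustrative example: 'hg phabupdate --accept :D9 -m "LGTM"' would build
# actions = [{b'type': b'accept', b'value': b'true'}], append
# {b'type': b'comment', b'value': 'LGTM'} only when reaching the last revision
# of the selected stack, and send one differential.revision.edit call per
# revision with those transactions and the revision's PHID as
# objectIdentifier.
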
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
997 b'id': b"D{}".format(m.group(b'id')),
998 b'id': b"D{}".format(m.group(b'id')),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
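
# Usage sketch (illustrative; assumes the hybriddict members can be addressed
# with dotted access in templates, as the "attributes" wording in the
# docstring suggests):
#
#   hg log -r . -T '{if(phabreview, "{phabreview.url} ({phabreview.id})\n")}'
#
# This prints the Differential URL and D-number when the changeset description
# (or one of its tags) references a review, and nothing otherwise.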