py3: convert indexes into bytes when enumerating lists in urlencodenested...
Ian Moody - r42066:47125193 default
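The fix is in urlencodenested(), which flattens nested parameters before urlencoding them: the old code interpolated the raw integer indexes from enumerate() into a bytes format string, which Python 2 accepts but Python 3 rejects. A minimal standalone sketch of the breakage and of the patched behaviour (illustration only, not part of the patch):

    # Python 3: %s in a bytes pattern needs a bytes-like operand, so an
    # int index coming straight from enumerate() raises TypeError.
    prefix, k = b'diffs', 0
    try:
        key = b'%s[%s]' % (prefix, k)          # pre-patch behaviour of process()
    except TypeError:
        key = b'%s[%s]' % (prefix, b'%d' % k)  # patched: convert the index to bytes
    print(key)  # b'diffs[0]'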
@@ -1,1018 +1,1019 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires ``Test Plan`` which might prevent some
changeset from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that is not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import contextlib
import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    httpconnection as httpconnectionmod,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    registrar,
    scmutil,
    smartset,
    tags,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]

def vcrcommand(name, flags, spec, helpcategory=None):
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, r'httpconnection',
                             stubs.VCRHTTPConnection),
                            (urlmod, r'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate

def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
-        items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
+        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
+        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
    process(b'', params)
    return util.urlreq.urlencode(flatparams)

def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token

def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
        else x,
        json.loads(body)
    )
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']

@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
    ui.write(b'%s\n' % s)

def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid

_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)

def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                      None, opts=diffopts):
        output.write(chunk)
    return output.getvalue()

def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff

def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': json.dumps({
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': json.dumps({
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': ctx.date()[0],
            },
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff

def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[b'phid'] for entry in data]

@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(b'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])

# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'node', b'Node ID'), (b'parent', b'Parent ')])

def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True

_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}

def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()

# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}

def _tokenize(text):
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              view[pos:]))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)

def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree

def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)

def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs

def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
                "phabricator:projects": [],
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{r'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about limited fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

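For illustration, a sketch of what ``getdescfromdrev`` produces for a made-up drev dict (the field values are invented, not real Conduit output); empty fields such as the test plan are dropped by ``filter(None, ...)``::

    drev = {
        b'title': b'phabricator: fix a thing',
        b'summary': b'Explain why the thing needed fixing.\n',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D123',
    }
    print(getdescfromdrev(drev).decode())
    # phabricator: fix a thing
    #
    # Explain why the thing needed fixing.
    #
    # Differential Revision: https://phab.example.com/D123
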
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {
            b'date': b'%d 0' % commit[b'time'],
            b'node': commit[b'rev'],
            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
        }
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}

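A sketch of the ``local:commits`` conversion, reusing the sample values from the docstring above; note the hard-coded ``0`` offset in the date, which is where the time zone information is lost::

    diff = {b'properties': {b'local:commits': {
        b'98c08acae292b2faf60a279b4189beb6cff1414d': {
            b'author': b'Foo Bar',
            b'authorEmail': b'foo@example.com',
            b'time': 1499546314,
            b'rev': b'98c08acae292b2faf60a279b4189beb6cff1414d',
            b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
        },
    }}}
    meta = getdiffmeta(diff)
    # {b'date': b'1499546314 0',
    #  b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
    #  b'user': b'Foo Bar <foo@example.com>',
    #  b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}
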
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

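The emitted text is meant to round-trip through :hg:`import`. Assuming ``_metanamemap`` (defined earlier in the file) maps the metadata keys onto the usual export-style header names, a generated patch starts roughly like this, with the ``getdescfromdrev`` message after the headers and the raw diff body after that::

    # HG changeset patch
    # User Foo Bar <foo@example.com>
    # Date 1499571514 25200
    # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
    <commit message produced by getdescfromdrev>
    diff --git a/... b/...
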
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

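As a usage sketch (assuming ``repo`` and ``ui`` refer to a configured repository and ui object), ``--stack`` simply wraps the spec in the ``:`` operator before querying::

    spec = b'D123'
    spec = b':(%s)' % spec              # what --stack does: b':(D123)'
    drevs = querydrev(repo, spec)       # the stack, bottom revision first
    readpatch(repo, drevs, ui.write)    # prints patches for `hg import`
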
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

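For reference, a sketch of the Conduit payload this builds for something like ``hg phabupdate --accept -m 'LGTM' :D3`` (the PHID below is a placeholder; the real value comes from the queried drev, and the comment transaction is only appended for the last revision)::

    params = {
        b'objectIdentifier': b'PHID-DREV-xxxxxxxxxxxxxxxxxxxx',
        b'transactions': [
            {b'type': b'accept', b'value': b'true'},
            {b'type': b'comment', b'value': b'LGTM'},
        ],
    }
    callconduit(repo, b'differential.revision.edit', params)
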
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            b'id': b"D{}".format(m.group(b'id')),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
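
The keyword first looks for the ``Differential Revision:`` line in the commit message and falls back to local tags such as ``D123``, after which a template can use it as, for example, ``{phabreview.url}`` or ``{phabreview.id}``. The extension's real ``_differentialrevisiondescre`` and ``_differentialrevisiontagre`` patterns are defined earlier in the file and may differ; an approximation of the description-based lookup, for illustration only::

    import re

    descre = re.compile(
        br'Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))')
    desc = b'phab: fix a thing\n\nDifferential Revision: https://phab.example.com/D123\n'
    m = descre.search(desc)
    if m:
        print(m.group('url'))  # b'https://phab.example.com/D123'
        print(m.group('id'))   # b'123'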