py3: convert URL to str before passing it to request...
Ian Moody
r42067:a98dabdb default
@@ -1,1019 +1,1019 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, a ``phabread`` command which prints a stack of
10 changesets to Phabricator, a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan`` which might prevent some
14 By default, Phabricator requires a ``Test Plan`` which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config server-side.
16 ``differential.require-test-plan-field`` config server-side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templateutil,
68 templateutil,
69 url as urlmod,
69 url as urlmod,
70 util,
70 util,
71 )
71 )
72 from mercurial.utils import (
72 from mercurial.utils import (
73 procutil,
73 procutil,
74 stringutil,
74 stringutil,
75 )
75 )
76
76
77 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # be specifying the version(s) of Mercurial they are tested with, or
79 # be specifying the version(s) of Mercurial they are tested with, or
80 # leave the attribute unspecified.
80 # leave the attribute unspecified.
81 testedwith = 'ships-with-hg-core'
81 testedwith = 'ships-with-hg-core'
82
82
83 cmdtable = {}
83 cmdtable = {}
84 command = registrar.command(cmdtable)
84 command = registrar.command(cmdtable)
85
85
86 configtable = {}
86 configtable = {}
87 configitem = registrar.configitem(configtable)
87 configitem = registrar.configitem(configtable)
88
88
89 # developer config: phabricator.batchsize
89 # developer config: phabricator.batchsize
90 configitem(b'phabricator', b'batchsize',
90 configitem(b'phabricator', b'batchsize',
91 default=12,
91 default=12,
92 )
92 )
93 configitem(b'phabricator', b'callsign',
93 configitem(b'phabricator', b'callsign',
94 default=None,
94 default=None,
95 )
95 )
96 configitem(b'phabricator', b'curlcmd',
96 configitem(b'phabricator', b'curlcmd',
97 default=None,
97 default=None,
98 )
98 )
99 # developer config: phabricator.repophid
99 # developer config: phabricator.repophid
100 configitem(b'phabricator', b'repophid',
100 configitem(b'phabricator', b'repophid',
101 default=None,
101 default=None,
102 )
102 )
103 configitem(b'phabricator', b'url',
103 configitem(b'phabricator', b'url',
104 default=None,
104 default=None,
105 )
105 )
106 configitem(b'phabsend', b'confirm',
106 configitem(b'phabsend', b'confirm',
107 default=False,
107 default=False,
108 )
108 )
109
109
110 colortable = {
110 colortable = {
111 b'phabricator.action.created': b'green',
111 b'phabricator.action.created': b'green',
112 b'phabricator.action.skipped': b'magenta',
112 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.updated': b'magenta',
113 b'phabricator.action.updated': b'magenta',
114 b'phabricator.desc': b'',
114 b'phabricator.desc': b'',
115 b'phabricator.drev': b'bold',
115 b'phabricator.drev': b'bold',
116 b'phabricator.node': b'',
116 b'phabricator.node': b'',
117 }
117 }
118
118
119 _VCR_FLAGS = [
119 _VCR_FLAGS = [
120 (b'', b'test-vcr', b'',
120 (b'', b'test-vcr', b'',
121 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
121 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 b', otherwise will mock all http requests using the specified vcr file.'
122 b', otherwise will mock all http requests using the specified vcr file.'
123 b' (ADVANCED)'
123 b' (ADVANCED)'
124 )),
124 )),
125 ]
125 ]
126
126
127 def vcrcommand(name, flags, spec, helpcategory=None):
127 def vcrcommand(name, flags, spec, helpcategory=None):
128 fullflags = flags + _VCR_FLAGS
128 fullflags = flags + _VCR_FLAGS
129 def decorate(fn):
129 def decorate(fn):
130 def inner(*args, **kwargs):
130 def inner(*args, **kwargs):
131 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
131 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
132 if cassette:
132 if cassette:
133 import hgdemandimport
133 import hgdemandimport
134 with hgdemandimport.deactivated():
134 with hgdemandimport.deactivated():
135 import vcr as vcrmod
135 import vcr as vcrmod
136 import vcr.stubs as stubs
136 import vcr.stubs as stubs
137 vcr = vcrmod.VCR(
137 vcr = vcrmod.VCR(
138 serializer=r'json',
138 serializer=r'json',
139 custom_patches=[
139 custom_patches=[
140 (urlmod, r'httpconnection',
140 (urlmod, r'httpconnection',
141 stubs.VCRHTTPConnection),
141 stubs.VCRHTTPConnection),
142 (urlmod, r'httpsconnection',
142 (urlmod, r'httpsconnection',
143 stubs.VCRHTTPSConnection),
143 stubs.VCRHTTPSConnection),
144 ])
144 ])
145 with vcr.use_cassette(cassette):
145 with vcr.use_cassette(cassette):
146 return fn(*args, **kwargs)
146 return fn(*args, **kwargs)
147 return fn(*args, **kwargs)
147 return fn(*args, **kwargs)
148 inner.__name__ = fn.__name__
148 inner.__name__ = fn.__name__
149 inner.__doc__ = fn.__doc__
149 inner.__doc__ = fn.__doc__
150 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
150 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
151 return decorate
151 return decorate
152
152
153 def urlencodenested(params):
153 def urlencodenested(params):
154 """like urlencode, but works with nested parameters.
154 """like urlencode, but works with nested parameters.
155
155
156 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
156 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
157 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
157 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
158 urlencode. Note: the encoding is consistent with PHP's http_build_query.
158 urlencode. Note: the encoding is consistent with PHP's http_build_query.
159 """
159 """
160 flatparams = util.sortdict()
160 flatparams = util.sortdict()
161 def process(prefix, obj):
161 def process(prefix, obj):
162 if isinstance(obj, bool):
162 if isinstance(obj, bool):
163 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
163 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
164 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
164 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
165 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
165 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
166 if items is None:
166 if items is None:
167 flatparams[prefix] = obj
167 flatparams[prefix] = obj
168 else:
168 else:
169 for k, v in items(obj):
169 for k, v in items(obj):
170 if prefix:
170 if prefix:
171 process(b'%s[%s]' % (prefix, k), v)
171 process(b'%s[%s]' % (prefix, k), v)
172 else:
172 else:
173 process(k, v)
173 process(k, v)
174 process(b'', params)
174 process(b'', params)
175 return util.urlreq.urlencode(flatparams)
175 return util.urlreq.urlencode(flatparams)
176
176
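The flattening described in the docstring above mirrors PHP's ``http_build_query``. A minimal standalone sketch of the same scheme, using only the standard library rather than the helpers in this file, and assuming plain str keys for simplicity::

    from urllib.parse import urlencode

    def flatten(prefix, obj, out):
        # lists become prefix[0], prefix[1], ...; dicts become prefix[key]
        if isinstance(obj, list):
            items = [(str(i), v) for i, v in enumerate(obj)]
        elif isinstance(obj, dict):
            items = list(obj.items())
        else:
            out[prefix] = obj
            return
        for k, v in items:
            flatten('%s[%s]' % (prefix, k) if prefix else k, v, out)

    flat = {}
    flatten('', {'a': ['b', 'c'], 'd': {'e': 'f'}}, flat)
    print(urlencode(flat))  # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f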
177 def readurltoken(repo):
177 def readurltoken(repo):
178 """return conduit url, token and make sure they exist
178 """return conduit url, token and make sure they exist
179
179
180 Currently read from [auth] config section. In the future, it might
180 Currently read from [auth] config section. In the future, it might
181 make sense to read from .arcconfig and .arcrc as well.
181 make sense to read from .arcconfig and .arcrc as well.
182 """
182 """
183 url = repo.ui.config(b'phabricator', b'url')
183 url = repo.ui.config(b'phabricator', b'url')
184 if not url:
184 if not url:
185 raise error.Abort(_(b'config %s.%s is required')
185 raise error.Abort(_(b'config %s.%s is required')
186 % (b'phabricator', b'url'))
186 % (b'phabricator', b'url'))
187
187
188 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
188 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
189 token = None
189 token = None
190
190
191 if res:
191 if res:
192 group, auth = res
192 group, auth = res
193
193
194 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
194 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
195
195
196 token = auth.get(b'phabtoken')
196 token = auth.get(b'phabtoken')
197
197
198 if not token:
198 if not token:
199 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
199 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
200 % (url,))
200 % (url,))
201
201
202 return url, token
202 return url, token
203
203
204 def callconduit(repo, name, params):
204 def callconduit(repo, name, params):
205 """call Conduit API, params is a dict. return json.loads result, or None"""
205 """call Conduit API, params is a dict. return json.loads result, or None"""
206 host, token = readurltoken(repo)
206 host, token = readurltoken(repo)
207 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
207 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
208 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
208 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
209 params = params.copy()
209 params = params.copy()
210 params[b'api.token'] = token
210 params[b'api.token'] = token
211 data = urlencodenested(params)
211 data = urlencodenested(params)
212 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
212 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
213 if curlcmd:
213 if curlcmd:
214 sin, sout = procutil.popen2(b'%s -d @- %s'
214 sin, sout = procutil.popen2(b'%s -d @- %s'
215 % (curlcmd, procutil.shellquote(url)))
215 % (curlcmd, procutil.shellquote(url)))
216 sin.write(data)
216 sin.write(data)
217 sin.close()
217 sin.close()
218 body = sout.read()
218 body = sout.read()
219 else:
219 else:
220 urlopener = urlmod.opener(repo.ui, authinfo)
220 urlopener = urlmod.opener(repo.ui, authinfo)
221 request = util.urlreq.request(url, data=data)
221 request = util.urlreq.request(pycompat.strurl(url), data=data)
222 with contextlib.closing(urlopener.open(request)) as rsp:
222 with contextlib.closing(urlopener.open(request)) as rsp:
223 body = rsp.read()
223 body = rsp.read()
224 repo.ui.debug(b'Conduit Response: %s\n' % body)
224 repo.ui.debug(b'Conduit Response: %s\n' % body)
225 parsed = pycompat.rapply(
225 parsed = pycompat.rapply(
226 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
226 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
227 else x,
227 else x,
228 json.loads(body)
228 json.loads(body)
229 )
229 )
230 if parsed.get(b'error_code'):
230 if parsed.get(b'error_code'):
231 msg = (_(b'Conduit Error (%s): %s')
231 msg = (_(b'Conduit Error (%s): %s')
232 % (parsed[b'error_code'], parsed[b'error_info']))
232 % (parsed[b'error_code'], parsed[b'error_info']))
233 raise error.Abort(msg)
233 raise error.Abort(msg)
234 return parsed[b'result']
234 return parsed[b'result']
235
235
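The functional change in this revision is in ``callconduit`` above: the URL handed to ``util.urlreq.request`` is now passed through ``pycompat.strurl``, because on Python 3 ``urllib.request.Request`` expects a native ``str`` URL while Mercurial keeps URLs as ``bytes``. A rough standalone illustration of the same conversion, where the host is the placeholder from the module docstring and ``user.whoami`` is only an example Conduit method::

    from urllib import request as urlreq

    url = b'https://phab.example.com/api/user.whoami'  # bytes, hg-style
    data = b'api.token=cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx'

    # Request() on Python 3 wants a str URL; decoding here plays the role
    # that pycompat.strurl() plays in callconduit().
    req = urlreq.Request(url.decode('ascii'), data=data)
    print(req.get_method(), req.full_url)  # POST https://phab.example.com/api/user.whoami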
236 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
236 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
237 def debugcallconduit(ui, repo, name):
237 def debugcallconduit(ui, repo, name):
238 """call Conduit API
238 """call Conduit API
239
239
240 Call parameters are read from stdin as a JSON blob. Result will be written
240 Call parameters are read from stdin as a JSON blob. Result will be written
241 to stdout as a JSON blob.
241 to stdout as a JSON blob.
242 """
242 """
243 params = json.loads(ui.fin.read())
243 params = json.loads(ui.fin.read())
244 result = callconduit(repo, name, params)
244 result = callconduit(repo, name, params)
245 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
245 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
246 ui.write(b'%s\n' % s)
246 ui.write(b'%s\n' % s)
247
247
248 def getrepophid(repo):
248 def getrepophid(repo):
249 """given callsign, return repository PHID or None"""
249 """given callsign, return repository PHID or None"""
250 # developer config: phabricator.repophid
250 # developer config: phabricator.repophid
251 repophid = repo.ui.config(b'phabricator', b'repophid')
251 repophid = repo.ui.config(b'phabricator', b'repophid')
252 if repophid:
252 if repophid:
253 return repophid
253 return repophid
254 callsign = repo.ui.config(b'phabricator', b'callsign')
254 callsign = repo.ui.config(b'phabricator', b'callsign')
255 if not callsign:
255 if not callsign:
256 return None
256 return None
257 query = callconduit(repo, b'diffusion.repository.search',
257 query = callconduit(repo, b'diffusion.repository.search',
258 {b'constraints': {b'callsigns': [callsign]}})
258 {b'constraints': {b'callsigns': [callsign]}})
259 if len(query[b'data']) == 0:
259 if len(query[b'data']) == 0:
260 return None
260 return None
261 repophid = query[b'data'][0][b'phid']
261 repophid = query[b'data'][0][b'phid']
262 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
262 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
263 return repophid
263 return repophid
264
264
265 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
265 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
266 _differentialrevisiondescre = re.compile(
266 _differentialrevisiondescre = re.compile(
267 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
267 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
268
268
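The second pattern above is what ties a commit message back to a Differential Revision. A quick standalone check of what it extracts, using an invented commit message::

    import re

    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)

    desc = (b'phabricator: fix a thing\n\n'
            b'Differential Revision: https://phab.example.com/D1234')
    m = descre.search(desc)
    # group names are looked up with str keys here
    print(m.group('url'), int(m.group('id')))  # b'https://phab.example.com/D1234' 1234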
269 def getoldnodedrevmap(repo, nodelist):
269 def getoldnodedrevmap(repo, nodelist):
270 """find previous nodes that have been sent to Phabricator
270 """find previous nodes that have been sent to Phabricator
271
271
272 return {node: (oldnode, Differential diff, Differential Revision ID)}
272 return {node: (oldnode, Differential diff, Differential Revision ID)}
273 for node in nodelist with known previously sent versions, or associated
273 for node in nodelist with known previously sent versions, or associated
274 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
274 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
275 be ``None``.
275 be ``None``.
276
276
277 Examines commit messages like "Differential Revision:" to get the
277 Examines commit messages like "Differential Revision:" to get the
278 association information.
278 association information.
279
279
280 If no such commit message line is found, examine all precursors and their
280 If no such commit message line is found, examine all precursors and their
281 tags. Tags formatted like "D1234" are considered a match, and the node
281 tags. Tags formatted like "D1234" are considered a match, and the node
282 with that tag, along with the number after "D" (e.g. 1234), will be returned.
282 with that tag, along with the number after "D" (e.g. 1234), will be returned.
283
283
284 The ``old node``, if not None, is guaranteed to be the node of the last diff
284 The ``old node``, if not None, is guaranteed to be the node of the last diff
285 of the corresponding Differential Revision, and to exist in the repo.
285 of the corresponding Differential Revision, and to exist in the repo.
286 """
286 """
287 unfi = repo.unfiltered()
287 unfi = repo.unfiltered()
288 nodemap = unfi.changelog.nodemap
288 nodemap = unfi.changelog.nodemap
289
289
290 result = {} # {node: (oldnode?, lastdiff?, drev)}
290 result = {} # {node: (oldnode?, lastdiff?, drev)}
291 toconfirm = {} # {node: (force, {precnode}, drev)}
291 toconfirm = {} # {node: (force, {precnode}, drev)}
292 for node in nodelist:
292 for node in nodelist:
293 ctx = unfi[node]
293 ctx = unfi[node]
294 # For tags like "D123", put them into "toconfirm" to verify later
294 # For tags like "D123", put them into "toconfirm" to verify later
295 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
295 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
296 for n in precnodes:
296 for n in precnodes:
297 if n in nodemap:
297 if n in nodemap:
298 for tag in unfi.nodetags(n):
298 for tag in unfi.nodetags(n):
299 m = _differentialrevisiontagre.match(tag)
299 m = _differentialrevisiontagre.match(tag)
300 if m:
300 if m:
301 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
301 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
302 continue
302 continue
303
303
304 # Check commit message
304 # Check commit message
305 m = _differentialrevisiondescre.search(ctx.description())
305 m = _differentialrevisiondescre.search(ctx.description())
306 if m:
306 if m:
307 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
307 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
308
308
309 # Double-check that tags are genuine by collecting all old nodes from
309 # Double-check that tags are genuine by collecting all old nodes from
310 # Phabricator, and expecting the precursors to overlap with them.
310 # Phabricator, and expecting the precursors to overlap with them.
311 if toconfirm:
311 if toconfirm:
312 drevs = [drev for force, precs, drev in toconfirm.values()]
312 drevs = [drev for force, precs, drev in toconfirm.values()]
313 alldiffs = callconduit(unfi, b'differential.querydiffs',
313 alldiffs = callconduit(unfi, b'differential.querydiffs',
314 {b'revisionIDs': drevs})
314 {b'revisionIDs': drevs})
315 getnode = lambda d: bin(
315 getnode = lambda d: bin(
316 getdiffmeta(d).get(b'node', b'')) or None
316 getdiffmeta(d).get(b'node', b'')) or None
317 for newnode, (force, precset, drev) in toconfirm.items():
317 for newnode, (force, precset, drev) in toconfirm.items():
318 diffs = [d for d in alldiffs.values()
318 diffs = [d for d in alldiffs.values()
319 if int(d[b'revisionID']) == drev]
319 if int(d[b'revisionID']) == drev]
320
320
321 # "precursors" as known by Phabricator
321 # "precursors" as known by Phabricator
322 phprecset = set(getnode(d) for d in diffs)
322 phprecset = set(getnode(d) for d in diffs)
323
323
324 # Ignore if precursors (Phabricator and local repo) do not overlap,
324 # Ignore if precursors (Phabricator and local repo) do not overlap,
325 # and force is not set (when commit message says nothing)
325 # and force is not set (when commit message says nothing)
326 if not force and not bool(phprecset & precset):
326 if not force and not bool(phprecset & precset):
327 tagname = b'D%d' % drev
327 tagname = b'D%d' % drev
328 tags.tag(repo, tagname, nullid, message=None, user=None,
328 tags.tag(repo, tagname, nullid, message=None, user=None,
329 date=None, local=True)
329 date=None, local=True)
330 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
330 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
331 b'Differential history\n') % drev)
331 b'Differential history\n') % drev)
332 continue
332 continue
333
333
334 # Find the last node using Phabricator metadata, and make sure it
334 # Find the last node using Phabricator metadata, and make sure it
335 # exists in the repo
335 # exists in the repo
336 oldnode = lastdiff = None
336 oldnode = lastdiff = None
337 if diffs:
337 if diffs:
338 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
338 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
339 oldnode = getnode(lastdiff)
339 oldnode = getnode(lastdiff)
340 if oldnode and oldnode not in nodemap:
340 if oldnode and oldnode not in nodemap:
341 oldnode = None
341 oldnode = None
342
342
343 result[newnode] = (oldnode, lastdiff, drev)
343 result[newnode] = (oldnode, lastdiff, drev)
344
344
345 return result
345 return result
346
346
347 def getdiff(ctx, diffopts):
347 def getdiff(ctx, diffopts):
348 """plain-text diff without header (user, commit message, etc)"""
348 """plain-text diff without header (user, commit message, etc)"""
349 output = util.stringio()
349 output = util.stringio()
350 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
350 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
351 None, opts=diffopts):
351 None, opts=diffopts):
352 output.write(chunk)
352 output.write(chunk)
353 return output.getvalue()
353 return output.getvalue()
354
354
355 def creatediff(ctx):
355 def creatediff(ctx):
356 """create a Differential Diff"""
356 """create a Differential Diff"""
357 repo = ctx.repo()
357 repo = ctx.repo()
358 repophid = getrepophid(repo)
358 repophid = getrepophid(repo)
359 # Create a "Differential Diff" via "differential.createrawdiff" API
359 # Create a "Differential Diff" via "differential.createrawdiff" API
360 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
360 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
361 if repophid:
361 if repophid:
362 params[b'repositoryPHID'] = repophid
362 params[b'repositoryPHID'] = repophid
363 diff = callconduit(repo, b'differential.createrawdiff', params)
363 diff = callconduit(repo, b'differential.createrawdiff', params)
364 if not diff:
364 if not diff:
365 raise error.Abort(_(b'cannot create diff for %s') % ctx)
365 raise error.Abort(_(b'cannot create diff for %s') % ctx)
366 return diff
366 return diff
367
367
368 def writediffproperties(ctx, diff):
368 def writediffproperties(ctx, diff):
369 """write metadata to diff so patches could be applied losslessly"""
369 """write metadata to diff so patches could be applied losslessly"""
370 params = {
370 params = {
371 b'diff_id': diff[b'id'],
371 b'diff_id': diff[b'id'],
372 b'name': b'hg:meta',
372 b'name': b'hg:meta',
373 b'data': json.dumps({
373 b'data': json.dumps({
374 b'user': ctx.user(),
374 b'user': ctx.user(),
375 b'date': b'%d %d' % ctx.date(),
375 b'date': b'%d %d' % ctx.date(),
376 b'node': ctx.hex(),
376 b'node': ctx.hex(),
377 b'parent': ctx.p1().hex(),
377 b'parent': ctx.p1().hex(),
378 }),
378 }),
379 }
379 }
380 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
380 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
381
381
382 params = {
382 params = {
383 b'diff_id': diff[b'id'],
383 b'diff_id': diff[b'id'],
384 b'name': b'local:commits',
384 b'name': b'local:commits',
385 b'data': json.dumps({
385 b'data': json.dumps({
386 ctx.hex(): {
386 ctx.hex(): {
387 b'author': stringutil.person(ctx.user()),
387 b'author': stringutil.person(ctx.user()),
388 b'authorEmail': stringutil.email(ctx.user()),
388 b'authorEmail': stringutil.email(ctx.user()),
389 b'time': ctx.date()[0],
389 b'time': ctx.date()[0],
390 },
390 },
391 }),
391 }),
392 }
392 }
393 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
393 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
394
394
395 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
395 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
396 olddiff=None, actions=None):
396 olddiff=None, actions=None):
397 """create or update a Differential Revision
397 """create or update a Differential Revision
398
398
399 If revid is None, create a new Differential Revision, otherwise update
399 If revid is None, create a new Differential Revision, otherwise update
400 revid. If parentrevid is not None, set it as a dependency.
400 revid. If parentrevid is not None, set it as a dependency.
401
401
402 If oldnode is not None, check if the patch content (without commit message
402 If oldnode is not None, check if the patch content (without commit message
403 and metadata) has changed before creating another diff.
403 and metadata) has changed before creating another diff.
404
404
405 If actions is not None, they will be appended to the transaction.
405 If actions is not None, they will be appended to the transaction.
406 """
406 """
407 repo = ctx.repo()
407 repo = ctx.repo()
408 if oldnode:
408 if oldnode:
409 diffopts = mdiff.diffopts(git=True, context=32767)
409 diffopts = mdiff.diffopts(git=True, context=32767)
410 oldctx = repo.unfiltered()[oldnode]
410 oldctx = repo.unfiltered()[oldnode]
411 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
411 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
412 else:
412 else:
413 neednewdiff = True
413 neednewdiff = True
414
414
415 transactions = []
415 transactions = []
416 if neednewdiff:
416 if neednewdiff:
417 diff = creatediff(ctx)
417 diff = creatediff(ctx)
418 transactions.append({b'type': b'update', b'value': diff[b'phid']})
418 transactions.append({b'type': b'update', b'value': diff[b'phid']})
419 else:
419 else:
420 # Even if we don't need to upload a new diff because the patch content
420 # Even if we don't need to upload a new diff because the patch content
421 # does not change, we might still need to update its metadata so
421 # does not change, we might still need to update its metadata so
422 # pushers know the correct node metadata.
422 # pushers know the correct node metadata.
423 assert olddiff
423 assert olddiff
424 diff = olddiff
424 diff = olddiff
425 writediffproperties(ctx, diff)
425 writediffproperties(ctx, diff)
426
426
427 # Use a temporary summary to set the dependency. There might be better ways,
427 # Use a temporary summary to set the dependency. There might be better ways,
428 # but I cannot find them for now. Do not do that if we are updating an
428 # but I cannot find them for now. Do not do that if we are updating an
429 # existing revision (revid is not None), since that introduces visible
429 # existing revision (revid is not None), since that introduces visible
430 # churn (someone edited "Summary" twice) on the web page.
430 # churn (someone edited "Summary" twice) on the web page.
431 if parentrevid and revid is None:
431 if parentrevid and revid is None:
432 summary = b'Depends on D%s' % parentrevid
432 summary = b'Depends on D%s' % parentrevid
433 transactions += [{b'type': b'summary', b'value': summary},
433 transactions += [{b'type': b'summary', b'value': summary},
434 {b'type': b'summary', b'value': b' '}]
434 {b'type': b'summary', b'value': b' '}]
435
435
436 if actions:
436 if actions:
437 transactions += actions
437 transactions += actions
438
438
439 # Parse commit message and update related fields.
439 # Parse commit message and update related fields.
440 desc = ctx.description()
440 desc = ctx.description()
441 info = callconduit(repo, b'differential.parsecommitmessage',
441 info = callconduit(repo, b'differential.parsecommitmessage',
442 {b'corpus': desc})
442 {b'corpus': desc})
443 for k, v in info[b'fields'].items():
443 for k, v in info[b'fields'].items():
444 if k in [b'title', b'summary', b'testPlan']:
444 if k in [b'title', b'summary', b'testPlan']:
445 transactions.append({b'type': k, b'value': v})
445 transactions.append({b'type': k, b'value': v})
446
446
447 params = {b'transactions': transactions}
447 params = {b'transactions': transactions}
448 if revid is not None:
448 if revid is not None:
449 # Update an existing Differential Revision
449 # Update an existing Differential Revision
450 params[b'objectIdentifier'] = revid
450 params[b'objectIdentifier'] = revid
451
451
452 revision = callconduit(repo, b'differential.revision.edit', params)
452 revision = callconduit(repo, b'differential.revision.edit', params)
453 if not revision:
453 if not revision:
454 raise error.Abort(_(b'cannot create revision for %s') % ctx)
454 raise error.Abort(_(b'cannot create revision for %s') % ctx)
455
455
456 return revision, diff
456 return revision, diff
457
457
458 def userphids(repo, names):
458 def userphids(repo, names):
459 """convert user names to PHIDs"""
459 """convert user names to PHIDs"""
460 names = [name.lower() for name in names]
460 names = [name.lower() for name in names]
461 query = {b'constraints': {b'usernames': names}}
461 query = {b'constraints': {b'usernames': names}}
462 result = callconduit(repo, b'user.search', query)
462 result = callconduit(repo, b'user.search', query)
463 # A username that is not found is not an API error, so check whether we
463 # A username that is not found is not an API error, so check whether we
464 # missed any names here.
464 # missed any names here.
465 data = result[b'data']
465 data = result[b'data']
466 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
466 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
467 unresolved = set(names) - resolved
467 unresolved = set(names) - resolved
468 if unresolved:
468 if unresolved:
469 raise error.Abort(_(b'unknown username: %s')
469 raise error.Abort(_(b'unknown username: %s')
470 % b' '.join(sorted(unresolved)))
470 % b' '.join(sorted(unresolved)))
471 return [entry[b'phid'] for entry in data]
471 return [entry[b'phid'] for entry in data]
472
472
473 @vcrcommand(b'phabsend',
473 @vcrcommand(b'phabsend',
474 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
474 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
475 (b'', b'amend', True, _(b'update commit messages')),
475 (b'', b'amend', True, _(b'update commit messages')),
476 (b'', b'reviewer', [], _(b'specify reviewers')),
476 (b'', b'reviewer', [], _(b'specify reviewers')),
477 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
477 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
478 _(b'REV [OPTIONS]'),
478 _(b'REV [OPTIONS]'),
479 helpcategory=command.CATEGORY_IMPORT_EXPORT)
479 helpcategory=command.CATEGORY_IMPORT_EXPORT)
480 def phabsend(ui, repo, *revs, **opts):
480 def phabsend(ui, repo, *revs, **opts):
481 """upload changesets to Phabricator
481 """upload changesets to Phabricator
482
482
483 If multiple revisions are specified, they will be sent as a stack with a
483 If multiple revisions are specified, they will be sent as a stack with a
484 linear dependency relationship, using the order specified by the
484 linear dependency relationship, using the order specified by the
485 revset.
485 revset.
486
486
487 When changesets are uploaded for the first time, local tags will be created
487 When changesets are uploaded for the first time, local tags will be created
488 to maintain the association. After the first time, phabsend will check the
488 to maintain the association. After the first time, phabsend will check the
489 obsstore and tags information so it can figure out whether to update an
489 obsstore and tags information so it can figure out whether to update an
490 existing Differential Revision, or create a new one.
490 existing Differential Revision, or create a new one.
491
491
492 If --amend is set, update commit messages so they have the
492 If --amend is set, update commit messages so they have the
493 ``Differential Revision`` URL, and remove related tags. This is similar to
493 ``Differential Revision`` URL, and remove related tags. This is similar to
494 what arcanist does, and is preferred in author-push workflows. Otherwise,
494 what arcanist does, and is preferred in author-push workflows. Otherwise,
495 use local tags to record the ``Differential Revision`` association.
495 use local tags to record the ``Differential Revision`` association.
496
496
497 The --confirm option lets you confirm changesets before sending them. You
497 The --confirm option lets you confirm changesets before sending them. You
498 can also add the following to your configuration file to make it the default
498 can also add the following to your configuration file to make it the default
499 behaviour::
499 behaviour::
500
500
501 [phabsend]
501 [phabsend]
502 confirm = true
502 confirm = true
503
503
504 phabsend will check obsstore and the above association to decide whether to
504 phabsend will check obsstore and the above association to decide whether to
505 update an existing Differential Revision, or create a new one.
505 update an existing Differential Revision, or create a new one.
506 """
506 """
507 revs = list(revs) + opts.get(b'rev', [])
507 revs = list(revs) + opts.get(b'rev', [])
508 revs = scmutil.revrange(repo, revs)
508 revs = scmutil.revrange(repo, revs)
509
509
510 if not revs:
510 if not revs:
511 raise error.Abort(_(b'phabsend requires at least one changeset'))
511 raise error.Abort(_(b'phabsend requires at least one changeset'))
512 if opts.get(b'amend'):
512 if opts.get(b'amend'):
513 cmdutil.checkunfinished(repo)
513 cmdutil.checkunfinished(repo)
514
514
515 # {newnode: (oldnode, olddiff, olddrev}
515 # {newnode: (oldnode, olddiff, olddrev}
516 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
516 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
517
517
518 confirm = ui.configbool(b'phabsend', b'confirm')
518 confirm = ui.configbool(b'phabsend', b'confirm')
519 confirm |= bool(opts.get(b'confirm'))
519 confirm |= bool(opts.get(b'confirm'))
520 if confirm:
520 if confirm:
521 confirmed = _confirmbeforesend(repo, revs, oldmap)
521 confirmed = _confirmbeforesend(repo, revs, oldmap)
522 if not confirmed:
522 if not confirmed:
523 raise error.Abort(_(b'phabsend cancelled'))
523 raise error.Abort(_(b'phabsend cancelled'))
524
524
525 actions = []
525 actions = []
526 reviewers = opts.get(b'reviewer', [])
526 reviewers = opts.get(b'reviewer', [])
527 if reviewers:
527 if reviewers:
528 phids = userphids(repo, reviewers)
528 phids = userphids(repo, reviewers)
529 actions.append({b'type': b'reviewers.add', b'value': phids})
529 actions.append({b'type': b'reviewers.add', b'value': phids})
530
530
531 drevids = [] # [int]
531 drevids = [] # [int]
532 diffmap = {} # {newnode: diff}
532 diffmap = {} # {newnode: diff}
533
533
534 # Send patches one by one so we know their Differential Revision IDs and
534 # Send patches one by one so we know their Differential Revision IDs and
535 # can provide dependency relationship
535 # can provide dependency relationship
536 lastrevid = None
536 lastrevid = None
537 for rev in revs:
537 for rev in revs:
538 ui.debug(b'sending rev %d\n' % rev)
538 ui.debug(b'sending rev %d\n' % rev)
539 ctx = repo[rev]
539 ctx = repo[rev]
540
540
541 # Get Differential Revision ID
541 # Get Differential Revision ID
542 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
542 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
543 if oldnode != ctx.node() or opts.get(b'amend'):
543 if oldnode != ctx.node() or opts.get(b'amend'):
544 # Create or update Differential Revision
544 # Create or update Differential Revision
545 revision, diff = createdifferentialrevision(
545 revision, diff = createdifferentialrevision(
546 ctx, revid, lastrevid, oldnode, olddiff, actions)
546 ctx, revid, lastrevid, oldnode, olddiff, actions)
547 diffmap[ctx.node()] = diff
547 diffmap[ctx.node()] = diff
548 newrevid = int(revision[b'object'][b'id'])
548 newrevid = int(revision[b'object'][b'id'])
549 if revid:
549 if revid:
550 action = b'updated'
550 action = b'updated'
551 else:
551 else:
552 action = b'created'
552 action = b'created'
553
553
554 # Create a local tag to note the association, if commit message
554 # Create a local tag to note the association, if commit message
555 # does not have it already
555 # does not have it already
556 m = _differentialrevisiondescre.search(ctx.description())
556 m = _differentialrevisiondescre.search(ctx.description())
557 if not m or int(m.group(b'id')) != newrevid:
557 if not m or int(m.group(b'id')) != newrevid:
558 tagname = b'D%d' % newrevid
558 tagname = b'D%d' % newrevid
559 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
559 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
560 date=None, local=True)
560 date=None, local=True)
561 else:
561 else:
562 # Nothing changed. But still set "newrevid" so the next revision
562 # Nothing changed. But still set "newrevid" so the next revision
563 # could depend on this one.
563 # could depend on this one.
564 newrevid = revid
564 newrevid = revid
565 action = b'skipped'
565 action = b'skipped'
566
566
567 actiondesc = ui.label(
567 actiondesc = ui.label(
568 {b'created': _(b'created'),
568 {b'created': _(b'created'),
569 b'skipped': _(b'skipped'),
569 b'skipped': _(b'skipped'),
570 b'updated': _(b'updated')}[action],
570 b'updated': _(b'updated')}[action],
571 b'phabricator.action.%s' % action)
571 b'phabricator.action.%s' % action)
572 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
572 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
573 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
573 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
574 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
574 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
575 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
575 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
576 desc))
576 desc))
577 drevids.append(newrevid)
577 drevids.append(newrevid)
578 lastrevid = newrevid
578 lastrevid = newrevid
579
579
580 # Update commit messages and remove tags
580 # Update commit messages and remove tags
581 if opts.get(b'amend'):
581 if opts.get(b'amend'):
582 unfi = repo.unfiltered()
582 unfi = repo.unfiltered()
583 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
583 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
584 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
584 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
585 wnode = unfi[b'.'].node()
585 wnode = unfi[b'.'].node()
586 mapping = {} # {oldnode: [newnode]}
586 mapping = {} # {oldnode: [newnode]}
587 for i, rev in enumerate(revs):
587 for i, rev in enumerate(revs):
588 old = unfi[rev]
588 old = unfi[rev]
589 drevid = drevids[i]
589 drevid = drevids[i]
590 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
590 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
591 newdesc = getdescfromdrev(drev)
591 newdesc = getdescfromdrev(drev)
592 # Make sure the commit message contains "Differential Revision"
592 # Make sure the commit message contains "Differential Revision"
593 if old.description() != newdesc:
593 if old.description() != newdesc:
594 if old.phase() == phases.public:
594 if old.phase() == phases.public:
595 ui.warn(_("warning: not updating public commit %s\n")
595 ui.warn(_("warning: not updating public commit %s\n")
596 % scmutil.formatchangeid(old))
596 % scmutil.formatchangeid(old))
597 continue
597 continue
598 parents = [
598 parents = [
599 mapping.get(old.p1().node(), (old.p1(),))[0],
599 mapping.get(old.p1().node(), (old.p1(),))[0],
600 mapping.get(old.p2().node(), (old.p2(),))[0],
600 mapping.get(old.p2().node(), (old.p2(),))[0],
601 ]
601 ]
602 new = context.metadataonlyctx(
602 new = context.metadataonlyctx(
603 repo, old, parents=parents, text=newdesc,
603 repo, old, parents=parents, text=newdesc,
604 user=old.user(), date=old.date(), extra=old.extra())
604 user=old.user(), date=old.date(), extra=old.extra())
605
605
606 newnode = new.commit()
606 newnode = new.commit()
607
607
608 mapping[old.node()] = [newnode]
608 mapping[old.node()] = [newnode]
609 # Update diff property
609 # Update diff property
610 writediffproperties(unfi[newnode], diffmap[old.node()])
610 writediffproperties(unfi[newnode], diffmap[old.node()])
611 # Remove the local tag since it is no longer necessary
611 # Remove the local tag since it is no longer necessary
612 tagname = b'D%d' % drevid
612 tagname = b'D%d' % drevid
613 if tagname in repo.tags():
613 if tagname in repo.tags():
614 tags.tag(repo, tagname, nullid, message=None, user=None,
614 tags.tag(repo, tagname, nullid, message=None, user=None,
615 date=None, local=True)
615 date=None, local=True)
616 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
616 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
617 if wnode in mapping:
617 if wnode in mapping:
618 unfi.setparents(mapping[wnode][0])
618 unfi.setparents(mapping[wnode][0])
619
619
620 # Map from "hg:meta" keys to header understood by "hg import". The order is
620 # Map from "hg:meta" keys to header understood by "hg import". The order is
621 # consistent with "hg export" output.
621 # consistent with "hg export" output.
622 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
622 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
623 (b'node', b'Node ID'), (b'parent', b'Parent ')])
623 (b'node', b'Node ID'), (b'parent', b'Parent ')])
624
624
625 def _confirmbeforesend(repo, revs, oldmap):
625 def _confirmbeforesend(repo, revs, oldmap):
626 url, token = readurltoken(repo)
626 url, token = readurltoken(repo)
627 ui = repo.ui
627 ui = repo.ui
628 for rev in revs:
628 for rev in revs:
629 ctx = repo[rev]
629 ctx = repo[rev]
630 desc = ctx.description().splitlines()[0]
630 desc = ctx.description().splitlines()[0]
631 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
631 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
632 if drevid:
632 if drevid:
633 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
633 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
634 else:
634 else:
635 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
635 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
636
636
637 ui.write(_(b'%s - %s: %s\n')
637 ui.write(_(b'%s - %s: %s\n')
638 % (drevdesc,
638 % (drevdesc,
639 ui.label(bytes(ctx), b'phabricator.node'),
639 ui.label(bytes(ctx), b'phabricator.node'),
640 ui.label(desc, b'phabricator.desc')))
640 ui.label(desc, b'phabricator.desc')))
641
641
642 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
642 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
643 b'$$ &Yes $$ &No') % url):
643 b'$$ &Yes $$ &No') % url):
644 return False
644 return False
645
645
646 return True
646 return True
647
647
648 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
648 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
649 b'abandoned'}
649 b'abandoned'}
650
650
651 def _getstatusname(drev):
651 def _getstatusname(drev):
652 """get normalized status name from a Differential Revision"""
652 """get normalized status name from a Differential Revision"""
653 return drev[b'statusName'].replace(b' ', b'').lower()
653 return drev[b'statusName'].replace(b' ', b'').lower()
654
654
655 # Small language to specify differential revisions. Supported symbols: (), :X,
655 # Small language to specify differential revisions. Supported symbols: (), :X,
656 # &, +, and -.
656 # &, +, and -.
657
657
658 _elements = {
658 _elements = {
659 # token-type: binding-strength, primary, prefix, infix, suffix
659 # token-type: binding-strength, primary, prefix, infix, suffix
660 b'(': (12, None, (b'group', 1, b')'), None, None),
660 b'(': (12, None, (b'group', 1, b')'), None, None),
661 b':': (8, None, (b'ancestors', 8), None, None),
661 b':': (8, None, (b'ancestors', 8), None, None),
662 b'&': (5, None, None, (b'and_', 5), None),
662 b'&': (5, None, None, (b'and_', 5), None),
663 b'+': (4, None, None, (b'add', 4), None),
663 b'+': (4, None, None, (b'add', 4), None),
664 b'-': (4, None, None, (b'sub', 4), None),
664 b'-': (4, None, None, (b'sub', 4), None),
665 b')': (0, None, None, None, None),
665 b')': (0, None, None, None, None),
666 b'symbol': (0, b'symbol', None, None, None),
666 b'symbol': (0, b'symbol', None, None, None),
667 b'end': (0, None, None, None, None),
667 b'end': (0, None, None, None, None),
668 }
668 }
669
669
670 def _tokenize(text):
670 def _tokenize(text):
671 view = memoryview(text) # zero-copy slice
671 view = memoryview(text) # zero-copy slice
672 special = b'():+-& '
672 special = b'():+-& '
673 pos = 0
673 pos = 0
674 length = len(text)
674 length = len(text)
675 while pos < length:
675 while pos < length:
676 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
676 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
677 view[pos:]))
677 view[pos:]))
678 if symbol:
678 if symbol:
679 yield (b'symbol', symbol, pos)
679 yield (b'symbol', symbol, pos)
680 pos += len(symbol)
680 pos += len(symbol)
681 else: # special char, ignore space
681 else: # special char, ignore space
682 if text[pos] != b' ':
682 if text[pos] != b' ':
683 yield (text[pos], None, pos)
683 yield (text[pos], None, pos)
684 pos += 1
684 pos += 1
685 yield (b'end', None, pos)
685 yield (b'end', None, pos)
686
686
687 def _parse(text):
687 def _parse(text):
688 tree, pos = parser.parser(_elements).parse(_tokenize(text))
688 tree, pos = parser.parser(_elements).parse(_tokenize(text))
689 if pos != len(text):
689 if pos != len(text):
690 raise error.ParseError(b'invalid token', pos)
690 raise error.ParseError(b'invalid token', pos)
691 return tree
691 return tree
692
692
693 def _parsedrev(symbol):
693 def _parsedrev(symbol):
694 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
694 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
695 if symbol.startswith(b'D') and symbol[1:].isdigit():
695 if symbol.startswith(b'D') and symbol[1:].isdigit():
696 return int(symbol[1:])
696 return int(symbol[1:])
697 if symbol.isdigit():
697 if symbol.isdigit():
698 return int(symbol)
698 return int(symbol)
699
699
700 def _prefetchdrevs(tree):
700 def _prefetchdrevs(tree):
701 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
701 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
702 drevs = set()
702 drevs = set()
703 ancestordrevs = set()
703 ancestordrevs = set()
704 op = tree[0]
704 op = tree[0]
705 if op == b'symbol':
705 if op == b'symbol':
706 r = _parsedrev(tree[1])
706 r = _parsedrev(tree[1])
707 if r:
707 if r:
708 drevs.add(r)
708 drevs.add(r)
709 elif op == b'ancestors':
709 elif op == b'ancestors':
710 r, a = _prefetchdrevs(tree[1])
710 r, a = _prefetchdrevs(tree[1])
711 drevs.update(r)
711 drevs.update(r)
712 ancestordrevs.update(r)
712 ancestordrevs.update(r)
713 ancestordrevs.update(a)
713 ancestordrevs.update(a)
714 else:
714 else:
715 for t in tree[1:]:
715 for t in tree[1:]:
716 r, a = _prefetchdrevs(t)
716 r, a = _prefetchdrevs(t)
717 drevs.update(r)
717 drevs.update(r)
718 ancestordrevs.update(a)
718 ancestordrevs.update(a)
719 return drevs, ancestordrevs
719 return drevs, ancestordrevs
720
720
721 def querydrev(repo, spec):
721 def querydrev(repo, spec):
722 """return a list of "Differential Revision" dicts
722 """return a list of "Differential Revision" dicts
723
723
724 spec is a string using a simple query language, see docstring in phabread
724 spec is a string using a simple query language, see docstring in phabread
725 for details.
725 for details.
726
726
727 A "Differential Revision dict" looks like:
727 A "Differential Revision dict" looks like:
728
728
729 {
729 {
730 "id": "2",
730 "id": "2",
731 "phid": "PHID-DREV-672qvysjcczopag46qty",
731 "phid": "PHID-DREV-672qvysjcczopag46qty",
732 "title": "example",
732 "title": "example",
733 "uri": "https://phab.example.com/D2",
733 "uri": "https://phab.example.com/D2",
734 "dateCreated": "1499181406",
734 "dateCreated": "1499181406",
735 "dateModified": "1499182103",
735 "dateModified": "1499182103",
736 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
736 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
737 "status": "0",
737 "status": "0",
738 "statusName": "Needs Review",
738 "statusName": "Needs Review",
739 "properties": [],
739 "properties": [],
740 "branch": null,
740 "branch": null,
741 "summary": "",
741 "summary": "",
742 "testPlan": "",
742 "testPlan": "",
743 "lineCount": "2",
743 "lineCount": "2",
744 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
744 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
745 "diffs": [
745 "diffs": [
746 "3",
746 "3",
747 "4",
747 "4",
748 ],
748 ],
749 "commits": [],
749 "commits": [],
750 "reviewers": [],
750 "reviewers": [],
751 "ccs": [],
751 "ccs": [],
752 "hashes": [],
752 "hashes": [],
753 "auxiliary": {
753 "auxiliary": {
754 "phabricator:projects": [],
754 "phabricator:projects": [],
755 "phabricator:depends-on": [
755 "phabricator:depends-on": [
756 "PHID-DREV-gbapp366kutjebt7agcd"
756 "PHID-DREV-gbapp366kutjebt7agcd"
757 ]
757 ]
758 },
758 },
759 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
759 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
760 "sourcePath": null
760 "sourcePath": null
761 }
761 }
762 """
762 """
763 def fetch(params):
763 def fetch(params):
764 """params -> single drev or None"""
764 """params -> single drev or None"""
765 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
765 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
766 if key in prefetched:
766 if key in prefetched:
767 return prefetched[key]
767 return prefetched[key]
768 drevs = callconduit(repo, b'differential.query', params)
768 drevs = callconduit(repo, b'differential.query', params)
769 # Fill prefetched with the result
769 # Fill prefetched with the result
770 for drev in drevs:
770 for drev in drevs:
771 prefetched[drev[b'phid']] = drev
771 prefetched[drev[b'phid']] = drev
772 prefetched[int(drev[b'id'])] = drev
772 prefetched[int(drev[b'id'])] = drev
773 if key not in prefetched:
773 if key not in prefetched:
774 raise error.Abort(_(b'cannot get Differential Revision %r')
774 raise error.Abort(_(b'cannot get Differential Revision %r')
775 % params)
775 % params)
776 return prefetched[key]
776 return prefetched[key]
777
777
778 def getstack(topdrevids):
778 def getstack(topdrevids):
779 """given a top, get a stack from the bottom, [id] -> [id]"""
779 """given a top, get a stack from the bottom, [id] -> [id]"""
780 visited = set()
780 visited = set()
781 result = []
781 result = []
782 queue = [{r'ids': [i]} for i in topdrevids]
782 queue = [{r'ids': [i]} for i in topdrevids]
783 while queue:
783 while queue:
784 params = queue.pop()
784 params = queue.pop()
785 drev = fetch(params)
785 drev = fetch(params)
786 if drev[b'id'] in visited:
786 if drev[b'id'] in visited:
787 continue
787 continue
788 visited.add(drev[b'id'])
788 visited.add(drev[b'id'])
789 result.append(int(drev[b'id']))
789 result.append(int(drev[b'id']))
790 auxiliary = drev.get(b'auxiliary', {})
790 auxiliary = drev.get(b'auxiliary', {})
791 depends = auxiliary.get(b'phabricator:depends-on', [])
791 depends = auxiliary.get(b'phabricator:depends-on', [])
792 for phid in depends:
792 for phid in depends:
793 queue.append({b'phids': [phid]})
793 queue.append({b'phids': [phid]})
794 result.reverse()
794 result.reverse()
795 return smartset.baseset(result)
795 return smartset.baseset(result)
796
796
797 # Initialize prefetch cache
797 # Initialize prefetch cache
798 prefetched = {} # {id or phid: drev}
798 prefetched = {} # {id or phid: drev}
799
799
800 tree = _parse(spec)
800 tree = _parse(spec)
801 drevs, ancestordrevs = _prefetchdrevs(tree)
801 drevs, ancestordrevs = _prefetchdrevs(tree)
802
802
803 # developer config: phabricator.batchsize
803 # developer config: phabricator.batchsize
804 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
804 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
805
805
806 # Prefetch Differential Revisions in batch
806 # Prefetch Differential Revisions in batch
807 tofetch = set(drevs)
807 tofetch = set(drevs)
808 for r in ancestordrevs:
808 for r in ancestordrevs:
809 tofetch.update(range(max(1, r - batchsize), r + 1))
809 tofetch.update(range(max(1, r - batchsize), r + 1))
810 if drevs:
810 if drevs:
811 fetch({b'ids': list(tofetch)})
811 fetch({b'ids': list(tofetch)})
812 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
812 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
813
813
814 # Walk through the tree, return smartsets
814 # Walk through the tree, return smartsets
815 def walk(tree):
815 def walk(tree):
816 op = tree[0]
816 op = tree[0]
817 if op == b'symbol':
817 if op == b'symbol':
818 drev = _parsedrev(tree[1])
818 drev = _parsedrev(tree[1])
819 if drev:
819 if drev:
820 return smartset.baseset([drev])
820 return smartset.baseset([drev])
821 elif tree[1] in _knownstatusnames:
821 elif tree[1] in _knownstatusnames:
822 drevs = [r for r in validids
822 drevs = [r for r in validids
823 if _getstatusname(prefetched[r]) == tree[1]]
823 if _getstatusname(prefetched[r]) == tree[1]]
824 return smartset.baseset(drevs)
824 return smartset.baseset(drevs)
825 else:
825 else:
826 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
826 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
827 elif op in {b'and_', b'add', b'sub'}:
827 elif op in {b'and_', b'add', b'sub'}:
828 assert len(tree) == 3
828 assert len(tree) == 3
829 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
829 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
830 elif op == b'group':
830 elif op == b'group':
831 return walk(tree[1])
831 return walk(tree[1])
832 elif op == b'ancestors':
832 elif op == b'ancestors':
833 return getstack(walk(tree[1]))
833 return getstack(walk(tree[1]))
834 else:
834 else:
835 raise error.ProgrammingError(b'illegal tree: %r' % tree)
835 raise error.ProgrammingError(b'illegal tree: %r' % tree)
836
836
837 return [prefetched[r] for r in walk(tree)]
837 return [prefetched[r] for r in walk(tree)]
838
838
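A sketch of how the spec language is typically used with ``querydrev`` (illustrative only: it assumes ``repo`` is an already-open repository with ``[phabricator]`` configured, and the revision numbers are invented)::

    # D123 plus everything it transitively depends on, bottom of the stack first
    stack = querydrev(repo, b':123')

    # two explicit revisions, minus any that are abandoned
    wanted = querydrev(repo, b'(123 + 124) - abandoned')

    for drev in wanted:
        print(drev[b'id'], drev[b'statusName'])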
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

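# Illustration only (hypothetical data, not part of the extension): given a
# minimal drev dict, getdescfromdrev() joins the non-empty pieces with blank
# lines; the empty test plan below is dropped, leaving the title, the summary,
# and the "Differential Revision:" line.
def _example_getdescfromdrev():
    drev = {
        b'title': b'phabricator: fix a bug',
        b'summary': b'A longer explanation of the change.',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D123',
    }
    return getdescfromdrev(drev)
    # -> b'phabricator: fix a bug\n\n'
    #    b'A longer explanation of the change.\n\n'
    #    b'Differential Revision: https://phab.example.com/D123'
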
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {
            b'date': b'%d 0' % commit[b'time'],
            b'node': commit[b'rev'],
            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
        }
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}

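# Illustration only (hypothetical data, not part of the extension): when only
# "local:commits" metadata (as sent by arc) is present, getdiffmeta() falls
# back to it and rebuilds an hg:meta-style dict, with the time zone lost
# (hence the trailing " 0" in the date).
def _example_getdiffmeta():
    node = b'98c08acae292b2faf60a279b4189beb6cff1414d'
    parent = b'6d0abad76b30e4724a37ab8721d630394070fe16'
    diff = {
        b'properties': {
            b'local:commits': {
                node: {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'rev': node,
                    b'parents': [parent],
                },
            },
        },
    }
    return getdiffmeta(diff)
    # -> {b'date': b'1499546314 0', b'node': node,
    #     b'user': b'Foo Bar <foo@example.com>', b'parent': parent}
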
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

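# Illustration only (hypothetical helper, not part of the extension):
# readpatch() just needs a "write" callable, so instead of ui.write the
# patches can be collected into a buffer.
def _example_readpatch_tobytes(repo, spec):
    chunks = []
    readpatch(repo, querydrev(repo, spec), chunks.append)
    return b''.join(chunks)
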
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used
    to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

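# Hypothetical command-line usage (illustration only; the D-numbers are made
# up):
#
#   $ hg phabread D123 > D123.patch             # print one revision as a patch
#   $ hg phabread --stack D123 | hg import -    # also read its dependencies
#   $ hg phabread ':D9 & needsreview'           # "Needs Review" part of the
#                                               # stack up to D9
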
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

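# Hypothetical command-line usage (illustration only; the D-numbers are made
# up). The --comment text is attached to the last revision of the selection:
#
#   $ hg phabupdate --accept D123 -m 'Looks good to me'
#   $ hg phabupdate --abandon ':D9'      # abandon the whole stack up to D9
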
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            b'id': b'D%s' % m.group(b'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
        return None
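
# Hypothetical template usage (illustration only; assumes the current changeset
# carries a "Differential Revision:" line or a Dxxx local tag):
#
#   $ hg log -r . -T '{phabreview.url} {phabreview.id}\n'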