py3: only pass unicode to json.dumps in writediffproperties...
Ian Moody
r42069:8fd19a7b default
@@ -1,1019 +1,1020 b''
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that are not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import contextlib
import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    httpconnection as httpconnectionmod,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    registrar,
    scmutil,
    smartset,
    tags,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
    default=12,
)
configitem(b'phabricator', b'callsign',
    default=None,
)
configitem(b'phabricator', b'curlcmd',
    default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
    default=None,
)
configitem(b'phabricator', b'url',
    default=None,
)
configitem(b'phabsend', b'confirm',
    default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]

def vcrcommand(name, flags, spec, helpcategory=None):
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, r'httpconnection',
                             stubs.VCRHTTPConnection),
                            (urlmod, r'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate
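
# A minimal usage sketch for the --test-vcr flag registered above (the cassette
# filename below is a made-up example, not something this extension ships):
#
#   $ hg phabread --test-vcr fixtures/phabread.json D123
#
# On the first run the HTTP requests/responses are recorded into the JSON
# cassette; later runs replay them, so tests need not reach a live Phabricator.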

def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
    process(b'', params)
    return util.urlreq.urlencode(flatparams)
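
# Illustrative sketch of the flattening described in the docstring above (the
# encoded output is an assumption based on that description, not a captured
# run):
#
#   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
#
# builds the flat pairs a[0]=b, a[1]=c, d[e]=f and hands them to urlencode,
# yielding roughly 'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'.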

def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token

def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
        else x,
        json.loads(body)
    )
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']
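
# Rough usage sketch (assumes a repo with phabricator.url plus an [auth] token
# configured; the method name and id below are only examples):
#
#   drevs = callconduit(repo, b'differential.query', {b'ids': [123]})
#
# which returns the decoded 'result' payload with unicode strings mapped back
# to local byte strings, or raises Abort when Conduit reports an error_code.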

@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
    ui.write(b'%s\n' % s)

def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid

_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
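
# Illustrative matches for the two patterns above (the URL reuses the example
# host from the module docstring): a local tag b'D1234' matches
# _differentialrevisiontagre with group(1) == b'1234', and a commit message
# line such as
#
#   Differential Revision: https://phab.example.com/D1234
#
# matches _differentialrevisiondescre with group(b'id') == b'1234'.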

def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such a commit message line is not found, examines all precursors and
    their tags. Tags with format like "D1234" are considered a match and the
    node with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                      None, opts=diffopts):
        output.write(chunk)
    return output.getvalue()

def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff

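# The very large context value (32767) passed to mdiff.diffopts() above (and
# again in createdifferentialrevision() below) presumably makes the generated
# git diff carry essentially the whole file as context. A hedged sketch of the
# same call shape, nothing more:
#
#   diffopts = mdiff.diffopts(git=True, context=32767)
#   text = getdiff(ctx, diffopts)
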
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': json.dumps({
-            b'user': ctx.user(),
-            b'date': b'%d %d' % ctx.date(),
-            b'node': ctx.hex(),
-            b'parent': ctx.p1().hex(),
+            u'user': encoding.unifromlocal(ctx.user()),
+            u'date': u'{:.0f} {}'.format(*ctx.date()),
+            u'node': encoding.unifromlocal(ctx.hex()),
+            u'parent': encoding.unifromlocal(ctx.p1().hex()),
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': json.dumps({
-            ctx.hex(): {
-                b'author': stringutil.person(ctx.user()),
-                b'authorEmail': stringutil.email(ctx.user()),
-                b'time': ctx.date()[0],
+            encoding.unifromlocal(ctx.hex()): {
+                u'author': encoding.unifromlocal(stringutil.person(ctx.user())),
+                u'authorEmail': encoding.unifromlocal(
+                    stringutil.email(ctx.user())),
+                u'time': u'{:.0f}'.format(ctx.date()[0]),
            },
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

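# Why the u'' literals in the "+" lines above: on Python 3, json.dumps()
# rejects bytes keys and values with a TypeError, so the hg:meta and
# local:commits payloads are now built from unicode (via encoding.unifromlocal)
# before being serialized. A tiny illustrative check, not part of the
# extension:
#
#   import json
#   json.dumps({u'node': u'98c08acae292'})    # fine on Python 2 and 3
#   # json.dumps({b'node': b'98c08acae292'})  # TypeError on Python 3
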
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff

def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[b'phid'] for entry in data]

@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependency relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove related tags. This is similar to
    what arcanist will do, and is more desired in author-push workflows.
    Otherwise, use local tags to record the ``Differential Revision``
    association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(b'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure the commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since they're no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])

# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'node', b'Node ID'), (b'parent', b'Parent ')])
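
# For illustration, with the hg:meta sample shown later in getdiffmeta()'s
# docstring, the mapping above corresponds to "hg export"-style header lines
# roughly like (a sketch, not generated output):
#
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16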

def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True

_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}

def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()

# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}

def _tokenize(text):
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              view[pos:]))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)

def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
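
# Illustrative DREVSPECs accepted by this small language (assumed behaviour,
# read off _elements and the getstack()/walk() code in querydrev() below):
# b'D23' or b'23' names a single revision, b':D23' pulls in D23 plus the
# revisions it depends on, b'D1+D23' is a union, b'D1-D2' a difference, and
# parentheses group, e.g. b'(:D23)-D17'.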
692
693
693 def _parsedrev(symbol):
694 def _parsedrev(symbol):
694 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
695 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
695 if symbol.startswith(b'D') and symbol[1:].isdigit():
696 if symbol.startswith(b'D') and symbol[1:].isdigit():
696 return int(symbol[1:])
697 return int(symbol[1:])
697 if symbol.isdigit():
698 if symbol.isdigit():
698 return int(symbol)
699 return int(symbol)
699
700
700 def _prefetchdrevs(tree):
701 def _prefetchdrevs(tree):
701 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
702 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
702 drevs = set()
703 drevs = set()
703 ancestordrevs = set()
704 ancestordrevs = set()
704 op = tree[0]
705 op = tree[0]
705 if op == b'symbol':
706 if op == b'symbol':
706 r = _parsedrev(tree[1])
707 r = _parsedrev(tree[1])
707 if r:
708 if r:
708 drevs.add(r)
709 drevs.add(r)
709 elif op == b'ancestors':
710 elif op == b'ancestors':
710 r, a = _prefetchdrevs(tree[1])
711 r, a = _prefetchdrevs(tree[1])
711 drevs.update(r)
712 drevs.update(r)
712 ancestordrevs.update(r)
713 ancestordrevs.update(r)
713 ancestordrevs.update(a)
714 ancestordrevs.update(a)
714 else:
715 else:
715 for t in tree[1:]:
716 for t in tree[1:]:
716 r, a = _prefetchdrevs(t)
717 r, a = _prefetchdrevs(t)
717 drevs.update(r)
718 drevs.update(r)
718 ancestordrevs.update(a)
719 ancestordrevs.update(a)
719 return drevs, ancestordrevs
720 return drevs, ancestordrevs
720
721
721 def querydrev(repo, spec):
722 def querydrev(repo, spec):
722 """return a list of "Differential Revision" dicts
723 """return a list of "Differential Revision" dicts
723
724
724 spec is a string using a simple query language, see docstring in phabread
725 spec is a string using a simple query language, see docstring in phabread
725 for details.
726 for details.
726
727
727 A "Differential Revision dict" looks like:
728 A "Differential Revision dict" looks like:
728
729
729 {
730 {
730 "id": "2",
731 "id": "2",
731 "phid": "PHID-DREV-672qvysjcczopag46qty",
732 "phid": "PHID-DREV-672qvysjcczopag46qty",
732 "title": "example",
733 "title": "example",
733 "uri": "https://phab.example.com/D2",
734 "uri": "https://phab.example.com/D2",
734 "dateCreated": "1499181406",
735 "dateCreated": "1499181406",
735 "dateModified": "1499182103",
736 "dateModified": "1499182103",
736 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
737 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
737 "status": "0",
738 "status": "0",
738 "statusName": "Needs Review",
739 "statusName": "Needs Review",
739 "properties": [],
740 "properties": [],
740 "branch": null,
741 "branch": null,
741 "summary": "",
742 "summary": "",
742 "testPlan": "",
743 "testPlan": "",
743 "lineCount": "2",
744 "lineCount": "2",
744 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
745 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
745 "diffs": [
746 "diffs": [
746 "3",
747 "3",
747 "4",
748 "4",
748 ],
749 ],
749 "commits": [],
750 "commits": [],
750 "reviewers": [],
751 "reviewers": [],
751 "ccs": [],
752 "ccs": [],
752 "hashes": [],
753 "hashes": [],
753 "auxiliary": {
754 "auxiliary": {
754 "phabricator:projects": [],
755 "phabricator:projects": [],
755 "phabricator:depends-on": [
756 "phabricator:depends-on": [
756 "PHID-DREV-gbapp366kutjebt7agcd"
757 "PHID-DREV-gbapp366kutjebt7agcd"
757 ]
758 ]
758 },
759 },
759 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
760 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
760 "sourcePath": null
761 "sourcePath": null
761 }
762 }
762 """
763 """
763 def fetch(params):
764 def fetch(params):
764 """params -> single drev or None"""
765 """params -> single drev or None"""
765 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
766 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
766 if key in prefetched:
767 if key in prefetched:
767 return prefetched[key]
768 return prefetched[key]
768 drevs = callconduit(repo, b'differential.query', params)
769 drevs = callconduit(repo, b'differential.query', params)
769 # Fill prefetched with the result
770 # Fill prefetched with the result
770 for drev in drevs:
771 for drev in drevs:
771 prefetched[drev[b'phid']] = drev
772 prefetched[drev[b'phid']] = drev
772 prefetched[int(drev[b'id'])] = drev
773 prefetched[int(drev[b'id'])] = drev
773 if key not in prefetched:
774 if key not in prefetched:
774 raise error.Abort(_(b'cannot get Differential Revision %r')
775 raise error.Abort(_(b'cannot get Differential Revision %r')
775 % params)
776 % params)
776 return prefetched[key]
777 return prefetched[key]
777
778
778 def getstack(topdrevids):
779 def getstack(topdrevids):
779 """given a top, get a stack from the bottom, [id] -> [id]"""
780 """given a top, get a stack from the bottom, [id] -> [id]"""
780 visited = set()
781 visited = set()
781 result = []
782 result = []
782 queue = [{b'ids': [i]} for i in topdrevids]
783 queue = [{b'ids': [i]} for i in topdrevids]
783 while queue:
784 while queue:
784 params = queue.pop()
785 params = queue.pop()
785 drev = fetch(params)
786 drev = fetch(params)
786 if drev[b'id'] in visited:
787 if drev[b'id'] in visited:
787 continue
788 continue
788 visited.add(drev[b'id'])
789 visited.add(drev[b'id'])
789 result.append(int(drev[b'id']))
790 result.append(int(drev[b'id']))
790 auxiliary = drev.get(b'auxiliary', {})
791 auxiliary = drev.get(b'auxiliary', {})
791 depends = auxiliary.get(b'phabricator:depends-on', [])
792 depends = auxiliary.get(b'phabricator:depends-on', [])
792 for phid in depends:
793 for phid in depends:
793 queue.append({b'phids': [phid]})
794 queue.append({b'phids': [phid]})
794 result.reverse()
795 result.reverse()
795 return smartset.baseset(result)
796 return smartset.baseset(result)
796
797
797 # Initialize prefetch cache
798 # Initialize prefetch cache
798 prefetched = {} # {id or phid: drev}
799 prefetched = {} # {id or phid: drev}
799
800
800 tree = _parse(spec)
801 tree = _parse(spec)
801 drevs, ancestordrevs = _prefetchdrevs(tree)
802 drevs, ancestordrevs = _prefetchdrevs(tree)
802
803
803 # developer config: phabricator.batchsize
804 # developer config: phabricator.batchsize
804 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
805 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
805
806
806 # Prefetch Differential Revisions in batch
807 # Prefetch Differential Revisions in batch
807 tofetch = set(drevs)
808 tofetch = set(drevs)
808 for r in ancestordrevs:
809 for r in ancestordrevs:
809 tofetch.update(range(max(1, r - batchsize), r + 1))
810 tofetch.update(range(max(1, r - batchsize), r + 1))
810 if drevs:
811 if drevs:
811 fetch({b'ids': list(tofetch)})
812 fetch({b'ids': list(tofetch)})
812 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
813 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
813
814
814 # Walk through the tree, return smartsets
815 # Walk through the tree, return smartsets
815 def walk(tree):
816 def walk(tree):
816 op = tree[0]
817 op = tree[0]
817 if op == b'symbol':
818 if op == b'symbol':
818 drev = _parsedrev(tree[1])
819 drev = _parsedrev(tree[1])
819 if drev:
820 if drev:
820 return smartset.baseset([drev])
821 return smartset.baseset([drev])
821 elif tree[1] in _knownstatusnames:
822 elif tree[1] in _knownstatusnames:
822 drevs = [r for r in validids
823 drevs = [r for r in validids
823 if _getstatusname(prefetched[r]) == tree[1]]
824 if _getstatusname(prefetched[r]) == tree[1]]
824 return smartset.baseset(drevs)
825 return smartset.baseset(drevs)
825 else:
826 else:
826 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
827 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
827 elif op in {b'and_', b'add', b'sub'}:
828 elif op in {b'and_', b'add', b'sub'}:
828 assert len(tree) == 3
829 assert len(tree) == 3
829 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
830 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
830 elif op == b'group':
831 elif op == b'group':
831 return walk(tree[1])
832 return walk(tree[1])
832 elif op == b'ancestors':
833 elif op == b'ancestors':
833 return getstack(walk(tree[1]))
834 return getstack(walk(tree[1]))
834 else:
835 else:
835 raise error.ProgrammingError(b'illegal tree: %r' % tree)
836 raise error.ProgrammingError(b'illegal tree: %r' % tree)
836
837
837 return [prefetched[r] for r in walk(tree)]
838 return [prefetched[r] for r in walk(tree)]
838
839
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API. But we only
    care about limited fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

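# A quick sketch of the assembled message (field values are made up): for a
# drev with title b'phab: fix foo', an empty summary, a test plan of
# b'ran the test suite' and uri b'https://phab.example.com/D123', the empty
# summary is dropped by filter(None, ...) and the remaining parts are joined
# with blank lines:
#
#   phab: fix foo
#
#   Test Plan:
#   ran the test suite
#
#   Differential Revision: https://phab.example.com/D123
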
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {
            b'date': b'%d 0' % commit[b'time'],
            b'node': commit[b'rev'],
            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
        }
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}

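# For the "local:commits" example in the docstring above, the converted
# metadata comes out roughly as follows (note the hard-coded "0" offset,
# which is why the time zone information is lost):
#
#   {
#     b'date': b'1499546314 0',
#     b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#     b'user': b'Foo Bar <foo@example.com>',
#     b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
#   }
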
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

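# Assuming _metanamemap (defined earlier in this file) maps the metadata keys
# to the usual patch header names, a single drev would be written out roughly
# as:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499546314 0
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev()>
#   <raw diff returned by differential.getrawdiff>
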
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be
    used to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only narrow down the non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

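# Illustrative invocations (the revision numbers are hypothetical); since
# readpatch() emits patches readable by 'hg import', the output can be piped
# straight into it:
#
#   $ hg phabread D123                        # print one revision as a patch
#   $ hg phabread --stack D123 | hg import -  # apply D123 and its dependencies
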
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

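# Hypothetical invocations; at most one of --accept/--reject/--abandon/
# --reclaim may be given, and -m attaches a comment to the last revision of
# the selection:
#
#   $ hg phabupdate --accept :D123 -m 'looks good, queuing for landing'
#   $ hg phabupdate --abandon D456
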
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(b'url'),
            b'id': b"D{}".format(m.group(b'id')),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
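
# Template usage sketch (the changeset and URL are hypothetical): once a
# commit message carries a "Differential Revision: ..." line, the keyword
# exposes its `url` and `id` attributes to templates:
#
#   $ hg log -r . -T '{phabreview.id} {phabreview.url}\n'
#   D123 https://phab.example.com/D123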