phabricator: set local:commits time metadata as an int, not a string...
Ian Moody
r42370:c0e30c9e default
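The change recorded here is a one-liner in writediffproperties(): the "time" value stored in the "local:commits" diff property used to be serialized as a byte string (b'%d' % ctx.date()[0]) and is now stored as an int, matching the integer "time" field that arc writes (compare the local:commits example in getdiffmeta's docstring near the end of this file). A minimal sketch of the resulting JSON difference; the stdlib json module is used here purely for illustration, while the extension itself serializes with templatefilters.json:

    import json

    timestamp = 1499546314  # example value taken from the arc-produced metadata shown below

    # old behaviour: the timestamp ends up as a JSON string
    print(json.dumps({'time': '%d' % timestamp}))  # {"time": "1499546314"}

    # new behaviour: the timestamp stays a JSON number
    print(json.dumps({'time': int(timestamp)}))    # {"time": 1499546314}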
@@ -1,1033 +1,1033 @@
1 # phabricator.py - simple Phabricator integration
2 #
3 # Copyright 2017 Facebook, Inc.
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
8
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
17
18 Config::
19
20 [phabricator]
21 # Phabricator URL
22 url = https://phab.example.com/
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
26 callsign = FOO
27
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
34 [auth]
35 example.schemes = https
36 example.prefix = phab.example.com
37
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
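# Example invocations (a sketch, not taken from this file): with the
# [phabricator] and [auth] config from the docstring above in place, the
# commands provided by this extension are typically run as:
#   hg phabsend -r .                  # send the current changeset
#   hg phabsend -r 'draft() and ::.'  # send a stack of draft changesets
#   hg phabread D1234 | hg import -   # apply a Differential Revision locally
# The revsets and the pipe through "hg import -" are ordinary Mercurial usage.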
41
42 from __future__ import absolute_import
43
44 import contextlib
45 import itertools
46 import json
47 import operator
48 import re
49
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
52 from mercurial import (
53 cmdutil,
54 context,
55 encoding,
56 error,
57 httpconnection as httpconnectionmod,
58 mdiff,
59 obsutil,
60 parser,
61 patch,
62 phases,
63 pycompat,
64 registrar,
65 scmutil,
66 smartset,
67 tags,
68 templatefilters,
69 templateutil,
70 url as urlmod,
71 util,
72 )
73 from mercurial.utils import (
74 procutil,
75 stringutil,
76 )
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
83
84 cmdtable = {}
85 command = registrar.command(cmdtable)
86
87 configtable = {}
88 configitem = registrar.configitem(configtable)
89
90 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
92 default=12,
93 )
94 configitem(b'phabricator', b'callsign',
95 default=None,
96 )
97 configitem(b'phabricator', b'curlcmd',
98 default=None,
99 )
100 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
102 default=None,
103 )
104 configitem(b'phabricator', b'url',
105 default=None,
106 )
107 configitem(b'phabsend', b'confirm',
108 default=False,
109 )
110
111 colortable = {
112 b'phabricator.action.created': b'green',
113 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.updated': b'magenta',
115 b'phabricator.desc': b'',
116 b'phabricator.drev': b'bold',
117 b'phabricator.node': b'',
118 }
119
120 _VCR_FLAGS = [
121 (b'', b'test-vcr', b'',
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 b', otherwise will mock all http requests using the specified vcr file.'
124 b' (ADVANCED)'
125 )),
126 ]
127
128 def vcrcommand(name, flags, spec, helpcategory=None):
129 fullflags = flags + _VCR_FLAGS
130 def decorate(fn):
131 def inner(*args, **kwargs):
132 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
133 if cassette:
134 import hgdemandimport
135 with hgdemandimport.deactivated():
136 import vcr as vcrmod
137 import vcr.stubs as stubs
138 vcr = vcrmod.VCR(
139 serializer=r'json',
140 custom_patches=[
141 (urlmod, r'httpconnection',
142 stubs.VCRHTTPConnection),
143 (urlmod, r'httpsconnection',
144 stubs.VCRHTTPSConnection),
145 ])
146 with vcr.use_cassette(cassette):
147 return fn(*args, **kwargs)
148 return fn(*args, **kwargs)
149 inner.__name__ = fn.__name__
150 inner.__doc__ = fn.__doc__
151 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
152 return decorate
153
154 def urlencodenested(params):
155 """like urlencode, but works with nested parameters.
156
157 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
158 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
159 urlencode. Note: the encoding is consistent with PHP's http_build_query.
160 """
161 flatparams = util.sortdict()
162 def process(prefix, obj):
163 if isinstance(obj, bool):
164 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
165 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
166 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
167 if items is None:
168 flatparams[prefix] = obj
169 else:
170 for k, v in items(obj):
171 if prefix:
172 process(b'%s[%s]' % (prefix, k), v)
173 else:
174 process(k, v)
175 process(b'', params)
176 return util.urlreq.urlencode(flatparams)
177
178 def readurltoken(repo):
179 """return conduit url, token and make sure they exist
180
181 Currently read from [auth] config section. In the future, it might
182 make sense to read from .arcconfig and .arcrc as well.
183 """
184 url = repo.ui.config(b'phabricator', b'url')
185 if not url:
186 raise error.Abort(_(b'config %s.%s is required')
187 % (b'phabricator', b'url'))
188
189 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
190 token = None
191
192 if res:
193 group, auth = res
194
195 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
196
197 token = auth.get(b'phabtoken')
198
199 if not token:
200 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
201 % (url,))
202
203 return url, token
204
205 def callconduit(repo, name, params):
206 """call Conduit API, params is a dict. return json.loads result, or None"""
207 host, token = readurltoken(repo)
208 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
209 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
210 params = params.copy()
211 params[b'api.token'] = token
212 data = urlencodenested(params)
213 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
214 if curlcmd:
215 sin, sout = procutil.popen2(b'%s -d @- %s'
216 % (curlcmd, procutil.shellquote(url)))
217 sin.write(data)
218 sin.close()
219 body = sout.read()
220 else:
221 urlopener = urlmod.opener(repo.ui, authinfo)
222 request = util.urlreq.request(pycompat.strurl(url), data=data)
223 with contextlib.closing(urlopener.open(request)) as rsp:
224 body = rsp.read()
225 repo.ui.debug(b'Conduit Response: %s\n' % body)
226 parsed = pycompat.rapply(
227 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
228 else x,
229 json.loads(body)
230 )
231 if parsed.get(b'error_code'):
232 msg = (_(b'Conduit Error (%s): %s')
233 % (parsed[b'error_code'], parsed[b'error_info']))
234 raise error.Abort(msg)
235 return parsed[b'result']
236
237 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
238 def debugcallconduit(ui, repo, name):
239 """call Conduit API
240
241 Call parameters are read from stdin as a JSON blob. Result will be written
242 to stdout as a JSON blob.
243 """
244 # json.loads only accepts bytes from 3.6+
245 rawparams = encoding.unifromlocal(ui.fin.read())
246 # json.loads only returns unicode strings
247 params = pycompat.rapply(lambda x:
248 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
249 json.loads(rawparams)
250 )
251 # json.dumps only accepts unicode strings
252 result = pycompat.rapply(lambda x:
253 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
254 callconduit(repo, name, params)
255 )
256 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
257 ui.write(b'%s\n' % encoding.unitolocal(s))
258
259 def getrepophid(repo):
260 """given callsign, return repository PHID or None"""
261 # developer config: phabricator.repophid
262 repophid = repo.ui.config(b'phabricator', b'repophid')
263 if repophid:
264 return repophid
265 callsign = repo.ui.config(b'phabricator', b'callsign')
266 if not callsign:
267 return None
268 query = callconduit(repo, b'diffusion.repository.search',
269 {b'constraints': {b'callsigns': [callsign]}})
270 if len(query[b'data']) == 0:
271 return None
272 repophid = query[b'data'][0][b'phid']
273 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
274 return repophid
275
276 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
277 _differentialrevisiondescre = re.compile(
278 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
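# For example, the description line below is matched by the regex above, with
# "1234" captured as the id group:
#   Differential Revision: https://phab.example.com/D1234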
279
280 def getoldnodedrevmap(repo, nodelist):
281 """find previous nodes that has been sent to Phabricator
282
283 return {node: (oldnode, Differential diff, Differential Revision ID)}
284 for node in nodelist with known previous sent versions, or associated
285 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
286 be ``None``.
287
288 Examines commit messages like "Differential Revision:" to get the
289 association information.
290
291 If such commit message line is not found, examines all precursors and their
292 tags. Tags with format like "D1234" are considered a match and the node
293 with that tag, and the number after "D" (ex. 1234) will be returned.
294
295 The ``old node``, if not None, is guaranteed to be the last diff of
296 corresponding Differential Revision, and exist in the repo.
297 """
298 unfi = repo.unfiltered()
299 nodemap = unfi.changelog.nodemap
300
301 result = {} # {node: (oldnode?, lastdiff?, drev)}
302 toconfirm = {} # {node: (force, {precnode}, drev)}
303 for node in nodelist:
304 ctx = unfi[node]
305 # For tags like "D123", put them into "toconfirm" to verify later
306 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
307 for n in precnodes:
308 if n in nodemap:
309 for tag in unfi.nodetags(n):
310 m = _differentialrevisiontagre.match(tag)
311 if m:
312 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
313 continue
314
315 # Check commit message
316 m = _differentialrevisiondescre.search(ctx.description())
317 if m:
318 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
319
320 # Double check if tags are genuine by collecting all old nodes from
321 # Phabricator, and expect precursors overlap with it.
322 if toconfirm:
323 drevs = [drev for force, precs, drev in toconfirm.values()]
324 alldiffs = callconduit(unfi, b'differential.querydiffs',
325 {b'revisionIDs': drevs})
326 getnode = lambda d: bin(
327 getdiffmeta(d).get(b'node', b'')) or None
328 for newnode, (force, precset, drev) in toconfirm.items():
329 diffs = [d for d in alldiffs.values()
330 if int(d[b'revisionID']) == drev]
331
332 # "precursors" as known by Phabricator
333 phprecset = set(getnode(d) for d in diffs)
334
335 # Ignore if precursors (Phabricator and local repo) do not overlap,
336 # and force is not set (when commit message says nothing)
337 if not force and not bool(phprecset & precset):
338 tagname = b'D%d' % drev
339 tags.tag(repo, tagname, nullid, message=None, user=None,
340 date=None, local=True)
341 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
342 b'Differential history\n') % drev)
343 continue
344
345 # Find the last node using Phabricator metadata, and make sure it
346 # exists in the repo
347 oldnode = lastdiff = None
348 if diffs:
349 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
350 oldnode = getnode(lastdiff)
351 if oldnode and oldnode not in nodemap:
352 oldnode = None
353
354 result[newnode] = (oldnode, lastdiff, drev)
355
356 return result
357
358 def getdiff(ctx, diffopts):
359 """plain-text diff without header (user, commit message, etc)"""
360 output = util.stringio()
361 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
362 None, opts=diffopts):
363 output.write(chunk)
364 return output.getvalue()
365
366 def creatediff(ctx):
367 """create a Differential Diff"""
368 repo = ctx.repo()
369 repophid = getrepophid(repo)
370 # Create a "Differential Diff" via "differential.createrawdiff" API
371 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
372 if repophid:
373 params[b'repositoryPHID'] = repophid
374 diff = callconduit(repo, b'differential.createrawdiff', params)
375 if not diff:
376 raise error.Abort(_(b'cannot create diff for %s') % ctx)
377 return diff
378
379 def writediffproperties(ctx, diff):
380 """write metadata to diff so patches could be applied losslessly"""
381 params = {
382 b'diff_id': diff[b'id'],
383 b'name': b'hg:meta',
384 b'data': templatefilters.json({
385 b'user': ctx.user(),
386 b'date': b'%d %d' % ctx.date(),
387 b'node': ctx.hex(),
388 b'parent': ctx.p1().hex(),
389 }),
390 }
391 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
392
393 params = {
394 b'diff_id': diff[b'id'],
395 b'name': b'local:commits',
396 b'data': templatefilters.json({
397 ctx.hex(): {
398 b'author': stringutil.person(ctx.user()),
399 b'authorEmail': stringutil.email(ctx.user()),
400 b'time': b'%d' % ctx.date()[0],
400 b'time': int(ctx.date()[0]),
401 },
402 }),
403 }
404 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
405
406 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
407 olddiff=None, actions=None):
408 """create or update a Differential Revision
409
410 If revid is None, create a new Differential Revision, otherwise update
411 revid. If parentrevid is not None, set it as a dependency.
412
413 If oldnode is not None, check if the patch content (without commit message
414 and metadata) has changed before creating another diff.
415
416 If actions is not None, they will be appended to the transaction.
417 """
418 repo = ctx.repo()
419 if oldnode:
420 diffopts = mdiff.diffopts(git=True, context=32767)
421 oldctx = repo.unfiltered()[oldnode]
422 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
423 else:
424 neednewdiff = True
425
426 transactions = []
427 if neednewdiff:
428 diff = creatediff(ctx)
429 transactions.append({b'type': b'update', b'value': diff[b'phid']})
430 else:
431 # Even if we don't need to upload a new diff because the patch content
432 # does not change. We might still need to update its metadata so
433 # pushers could know the correct node metadata.
434 assert olddiff
435 diff = olddiff
436 writediffproperties(ctx, diff)
437
438 # Use a temporary summary to set dependency. There might be better ways but
439 # I cannot find them for now. But do not do that if we are updating an
440 # existing revision (revid is not None) since that introduces visible
441 # churns (someone edited "Summary" twice) on the web page.
442 if parentrevid and revid is None:
443 summary = b'Depends on D%d' % parentrevid
444 transactions += [{b'type': b'summary', b'value': summary},
445 {b'type': b'summary', b'value': b' '}]
446
447 if actions:
448 transactions += actions
449
450 # Parse commit message and update related fields.
451 desc = ctx.description()
452 info = callconduit(repo, b'differential.parsecommitmessage',
453 {b'corpus': desc})
454 for k, v in info[b'fields'].items():
455 if k in [b'title', b'summary', b'testPlan']:
456 transactions.append({b'type': k, b'value': v})
457
458 params = {b'transactions': transactions}
459 if revid is not None:
460 # Update an existing Differential Revision
461 params[b'objectIdentifier'] = revid
462
463 revision = callconduit(repo, b'differential.revision.edit', params)
464 if not revision:
465 raise error.Abort(_(b'cannot create revision for %s') % ctx)
466
467 return revision, diff
468
469 def userphids(repo, names):
470 """convert user names to PHIDs"""
471 names = [name.lower() for name in names]
472 query = {b'constraints': {b'usernames': names}}
473 result = callconduit(repo, b'user.search', query)
474 # username not found is not an error of the API. So check if we have missed
475 # some names here.
476 data = result[b'data']
477 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
478 unresolved = set(names) - resolved
479 if unresolved:
480 raise error.Abort(_(b'unknown username: %s')
481 % b' '.join(sorted(unresolved)))
482 return [entry[b'phid'] for entry in data]
483
484 @vcrcommand(b'phabsend',
485 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
486 (b'', b'amend', True, _(b'update commit messages')),
487 (b'', b'reviewer', [], _(b'specify reviewers')),
488 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
489 _(b'REV [OPTIONS]'),
490 helpcategory=command.CATEGORY_IMPORT_EXPORT)
491 def phabsend(ui, repo, *revs, **opts):
492 """upload changesets to Phabricator
493
494 If there are multiple revisions specified, they will be send as a stack
495 with a linear dependencies relationship using the order specified by the
496 revset.
497
498 For the first time uploading changesets, local tags will be created to
499 maintain the association. After the first time, phabsend will check
500 obsstore and tags information so it can figure out whether to update an
501 existing Differential Revision, or create a new one.
502
503 If --amend is set, update commit messages so they have the
504 ``Differential Revision`` URL, remove related tags. This is similar to what
505 arcanist will do, and is more desired in author-push workflows. Otherwise,
506 use local tags to record the ``Differential Revision`` association.
507
508 The --confirm option lets you confirm changesets before sending them. You
509 can also add following to your configuration file to make it default
510 behaviour::
511
512 [phabsend]
513 confirm = true
514
515 phabsend will check obsstore and the above association to decide whether to
516 update an existing Differential Revision, or create a new one.
517 """
518 opts = pycompat.byteskwargs(opts)
519 revs = list(revs) + opts.get(b'rev', [])
520 revs = scmutil.revrange(repo, revs)
521
522 if not revs:
523 raise error.Abort(_(b'phabsend requires at least one changeset'))
524 if opts.get(b'amend'):
525 cmdutil.checkunfinished(repo)
526
527 # {newnode: (oldnode, olddiff, olddrev}
528 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
529
530 confirm = ui.configbool(b'phabsend', b'confirm')
531 confirm |= bool(opts.get(b'confirm'))
532 if confirm:
533 confirmed = _confirmbeforesend(repo, revs, oldmap)
534 if not confirmed:
535 raise error.Abort(_(b'phabsend cancelled'))
536
537 actions = []
538 reviewers = opts.get(b'reviewer', [])
539 if reviewers:
540 phids = userphids(repo, reviewers)
541 actions.append({b'type': b'reviewers.add', b'value': phids})
542
543 drevids = [] # [int]
544 diffmap = {} # {newnode: diff}
545
546 # Send patches one by one so we know their Differential Revision IDs and
547 # can provide dependency relationship
548 lastrevid = None
549 for rev in revs:
550 ui.debug(b'sending rev %d\n' % rev)
551 ctx = repo[rev]
552
553 # Get Differential Revision ID
554 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
555 if oldnode != ctx.node() or opts.get(b'amend'):
556 # Create or update Differential Revision
557 revision, diff = createdifferentialrevision(
558 ctx, revid, lastrevid, oldnode, olddiff, actions)
559 diffmap[ctx.node()] = diff
560 newrevid = int(revision[b'object'][b'id'])
561 if revid:
562 action = b'updated'
563 else:
564 action = b'created'
565
566 # Create a local tag to note the association, if commit message
567 # does not have it already
568 m = _differentialrevisiondescre.search(ctx.description())
569 if not m or int(m.group(r'id')) != newrevid:
570 tagname = b'D%d' % newrevid
571 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
572 date=None, local=True)
573 else:
574 # Nothing changed. But still set "newrevid" so the next revision
575 # could depend on this one.
576 newrevid = revid
577 action = b'skipped'
578
579 actiondesc = ui.label(
580 {b'created': _(b'created'),
581 b'skipped': _(b'skipped'),
582 b'updated': _(b'updated')}[action],
583 b'phabricator.action.%s' % action)
584 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
585 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
586 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
587 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
588 desc))
589 drevids.append(newrevid)
590 lastrevid = newrevid
591
592 # Update commit messages and remove tags
593 if opts.get(b'amend'):
594 unfi = repo.unfiltered()
595 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
596 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
597 wnode = unfi[b'.'].node()
598 mapping = {} # {oldnode: [newnode]}
599 for i, rev in enumerate(revs):
600 old = unfi[rev]
601 drevid = drevids[i]
602 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
603 newdesc = getdescfromdrev(drev)
604 # Make sure commit message contain "Differential Revision"
605 if old.description() != newdesc:
606 if old.phase() == phases.public:
607 ui.warn(_("warning: not updating public commit %s\n")
608 % scmutil.formatchangeid(old))
609 continue
610 parents = [
611 mapping.get(old.p1().node(), (old.p1(),))[0],
612 mapping.get(old.p2().node(), (old.p2(),))[0],
613 ]
614 new = context.metadataonlyctx(
615 repo, old, parents=parents, text=newdesc,
616 user=old.user(), date=old.date(), extra=old.extra())
617
618 newnode = new.commit()
619
620 mapping[old.node()] = [newnode]
621 # Update diff property
622 writediffproperties(unfi[newnode], diffmap[old.node()])
623 # Remove local tags since it's no longer necessary
624 tagname = b'D%d' % drevid
625 if tagname in repo.tags():
626 tags.tag(repo, tagname, nullid, message=None, user=None,
627 date=None, local=True)
628 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
629 if wnode in mapping:
630 unfi.setparents(mapping[wnode][0])
631
632 # Map from "hg:meta" keys to header understood by "hg import". The order is
633 # consistent with "hg export" output.
634 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
635 (b'node', b'Node ID'), (b'parent', b'Parent ')])
636
637 def _confirmbeforesend(repo, revs, oldmap):
638 url, token = readurltoken(repo)
639 ui = repo.ui
640 for rev in revs:
641 ctx = repo[rev]
642 desc = ctx.description().splitlines()[0]
643 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
644 if drevid:
645 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
646 else:
647 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
648
649 ui.write(_(b'%s - %s: %s\n')
650 % (drevdesc,
651 ui.label(bytes(ctx), b'phabricator.node'),
652 ui.label(desc, b'phabricator.desc')))
653
654 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
655 b'$$ &Yes $$ &No') % url):
656 return False
657
658 return True
659
660 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
661 b'abandoned'}
662
663 def _getstatusname(drev):
664 """get normalized status name from a Differential Revision"""
665 return drev[b'statusName'].replace(b' ', b'').lower()
666
667 # Small language to specify differential revisions. Support symbols: (), :X,
668 # +, and -.
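# For example (a sketch of the spec syntax; the phabread docstring is the
# authoritative description): ':D4' selects D4 and the revisions it depends
# on, 'D1+D2' their union, and 'D1-D2' the difference.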
669
669
670 _elements = {
670 _elements = {
671 # token-type: binding-strength, primary, prefix, infix, suffix
671 # token-type: binding-strength, primary, prefix, infix, suffix
672 b'(': (12, None, (b'group', 1, b')'), None, None),
672 b'(': (12, None, (b'group', 1, b')'), None, None),
673 b':': (8, None, (b'ancestors', 8), None, None),
673 b':': (8, None, (b'ancestors', 8), None, None),
674 b'&': (5, None, None, (b'and_', 5), None),
674 b'&': (5, None, None, (b'and_', 5), None),
675 b'+': (4, None, None, (b'add', 4), None),
675 b'+': (4, None, None, (b'add', 4), None),
676 b'-': (4, None, None, (b'sub', 4), None),
676 b'-': (4, None, None, (b'sub', 4), None),
677 b')': (0, None, None, None, None),
677 b')': (0, None, None, None, None),
678 b'symbol': (0, b'symbol', None, None, None),
678 b'symbol': (0, b'symbol', None, None, None),
679 b'end': (0, None, None, None, None),
679 b'end': (0, None, None, None, None),
680 }
680 }
681
681
682 def _tokenize(text):
682 def _tokenize(text):
683 view = memoryview(text) # zero-copy slice
683 view = memoryview(text) # zero-copy slice
684 special = b'():+-& '
684 special = b'():+-& '
685 pos = 0
685 pos = 0
686 length = len(text)
686 length = len(text)
687 while pos < length:
687 while pos < length:
688 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
688 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
689 pycompat.iterbytestr(view[pos:])))
689 pycompat.iterbytestr(view[pos:])))
690 if symbol:
690 if symbol:
691 yield (b'symbol', symbol, pos)
691 yield (b'symbol', symbol, pos)
692 pos += len(symbol)
692 pos += len(symbol)
693 else: # special char, ignore space
693 else: # special char, ignore space
694 if text[pos] != b' ':
694 if text[pos] != b' ':
695 yield (text[pos], None, pos)
695 yield (text[pos], None, pos)
696 pos += 1
696 pos += 1
697 yield (b'end', None, pos)
697 yield (b'end', None, pos)
698
698
699 def _parse(text):
699 def _parse(text):
700 tree, pos = parser.parser(_elements).parse(_tokenize(text))
700 tree, pos = parser.parser(_elements).parse(_tokenize(text))
701 if pos != len(text):
701 if pos != len(text):
702 raise error.ParseError(b'invalid token', pos)
702 raise error.ParseError(b'invalid token', pos)
703 return tree
703 return tree
704
704
705 def _parsedrev(symbol):
705 def _parsedrev(symbol):
706 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
706 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
707 if symbol.startswith(b'D') and symbol[1:].isdigit():
707 if symbol.startswith(b'D') and symbol[1:].isdigit():
708 return int(symbol[1:])
708 return int(symbol[1:])
709 if symbol.isdigit():
709 if symbol.isdigit():
710 return int(symbol)
710 return int(symbol)
711
711
712 def _prefetchdrevs(tree):
712 def _prefetchdrevs(tree):
713 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
713 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
714 drevs = set()
714 drevs = set()
715 ancestordrevs = set()
715 ancestordrevs = set()
716 op = tree[0]
716 op = tree[0]
717 if op == b'symbol':
717 if op == b'symbol':
718 r = _parsedrev(tree[1])
718 r = _parsedrev(tree[1])
719 if r:
719 if r:
720 drevs.add(r)
720 drevs.add(r)
721 elif op == b'ancestors':
721 elif op == b'ancestors':
722 r, a = _prefetchdrevs(tree[1])
722 r, a = _prefetchdrevs(tree[1])
723 drevs.update(r)
723 drevs.update(r)
724 ancestordrevs.update(r)
724 ancestordrevs.update(r)
725 ancestordrevs.update(a)
725 ancestordrevs.update(a)
726 else:
726 else:
727 for t in tree[1:]:
727 for t in tree[1:]:
728 r, a = _prefetchdrevs(t)
728 r, a = _prefetchdrevs(t)
729 drevs.update(r)
729 drevs.update(r)
730 ancestordrevs.update(a)
730 ancestordrevs.update(a)
731 return drevs, ancestordrevs
731 return drevs, ancestordrevs
732
732
733 def querydrev(repo, spec):
733 def querydrev(repo, spec):
734 """return a list of "Differential Revision" dicts
734 """return a list of "Differential Revision" dicts
735
735
736 spec is a string using a simple query language, see docstring in phabread
736 spec is a string using a simple query language, see docstring in phabread
737 for details.
737 for details.
738
738
739 A "Differential Revision dict" looks like:
739 A "Differential Revision dict" looks like:
740
740
741 {
741 {
742 "id": "2",
742 "id": "2",
743 "phid": "PHID-DREV-672qvysjcczopag46qty",
743 "phid": "PHID-DREV-672qvysjcczopag46qty",
744 "title": "example",
744 "title": "example",
745 "uri": "https://phab.example.com/D2",
745 "uri": "https://phab.example.com/D2",
746 "dateCreated": "1499181406",
746 "dateCreated": "1499181406",
747 "dateModified": "1499182103",
747 "dateModified": "1499182103",
748 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
748 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
749 "status": "0",
749 "status": "0",
750 "statusName": "Needs Review",
750 "statusName": "Needs Review",
751 "properties": [],
751 "properties": [],
752 "branch": null,
752 "branch": null,
753 "summary": "",
753 "summary": "",
754 "testPlan": "",
754 "testPlan": "",
755 "lineCount": "2",
755 "lineCount": "2",
756 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
756 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
757 "diffs": [
757 "diffs": [
758 "3",
758 "3",
759 "4",
759 "4",
760 ],
760 ],
761 "commits": [],
761 "commits": [],
762 "reviewers": [],
762 "reviewers": [],
763 "ccs": [],
763 "ccs": [],
764 "hashes": [],
764 "hashes": [],
765 "auxiliary": {
765 "auxiliary": {
766 "phabricator:projects": [],
766 "phabricator:projects": [],
767 "phabricator:depends-on": [
767 "phabricator:depends-on": [
768 "PHID-DREV-gbapp366kutjebt7agcd"
768 "PHID-DREV-gbapp366kutjebt7agcd"
769 ]
769 ]
770 },
770 },
771 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
771 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
772 "sourcePath": null
772 "sourcePath": null
773 }
773 }
774 """
774 """
775 def fetch(params):
775 def fetch(params):
776 """params -> single drev or None"""
776 """params -> single drev or None"""
777 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
777 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
778 if key in prefetched:
778 if key in prefetched:
779 return prefetched[key]
779 return prefetched[key]
780 drevs = callconduit(repo, b'differential.query', params)
780 drevs = callconduit(repo, b'differential.query', params)
781 # Fill prefetched with the result
781 # Fill prefetched with the result
782 for drev in drevs:
782 for drev in drevs:
783 prefetched[drev[b'phid']] = drev
783 prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
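# A note on the traversal above: getstack() follows "phabricator:depends-on"
# edges from the given top revisions using a stack (queue.pop() takes the most
# recently added item) and reverses the visit order at the end, so the
# returned baseset is ordered bottom-up.  Illustrative sketch with made-up
# revision numbers: if D3 depends on D2 and D2 depends on D1, then
# getstack([3]) evaluates to a baseset over [1, 2, 3].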

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
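# Illustrative sketch of a message assembled by getdescfromdrev() above; the
# field values are made up, not taken from a real revision:
#
#     add frobnicator support
#
#     Summary paragraph of the change.
#
#     Test Plan:
#     ran the relevant tests locally
#
#     Differential Revision: https://phab.example.com/D123
#
# Empty sections are dropped by filter(None, ...), so only non-empty blocks
# are joined with blank lines.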

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {
            b'date': b'%d 0' % commit[b'time'],
            b'node': commit[b'rev'],
            b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
        }
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}
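# For the "local:commits" example shown in the docstring above, getdiffmeta()
# would produce roughly the following dict (a sketch derived from that
# example, not from a live server):
#
#     {b'date': b'1499546314 0',
#      b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#      b'user': b'Foo Bar <foo@example.com>',
#      b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}
#
# The hard-coded "0" offset is why time zone information is lost, as noted in
# the docstring.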

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
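# Sketch of what readpatch() emits for one revision, assuming _metanamemap
# (defined earlier in this file) maps hg:meta keys to patch header names such
# as "Date", "User", "Node ID" and "Parent"; all values below are
# illustrative only:
#
#     # HG changeset patch
#     # Date 1499571514 25200
#     # User Foo Bar <foo@example.com>
#     # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#     # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#     <commit message from getdescfromdrev()>
#     <raw diff from differential.getrawdiff>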

@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
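# Illustrative invocations (the revision numbers are made up):
#
#     $ hg phabread D123 > D123.patch
#     $ hg phabread --stack D123 | hg import -
#     $ hg phabread ':D6+8-(2+D4)'
#
# The output is a plain-text patch stream, so piping it into :hg:`import` is
# one way to apply it locally, as the readpatch() docstring above notes.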

@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)
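# Illustrative invocations (revision numbers made up):
#
#     $ hg phabupdate --accept D123
#     $ hg phabupdate --abandon ':D9' -m 'superseded by a new series'
#
# Only one of --accept/--reject/--abandon/--reclaim may be passed at a time,
# and the comment, if given, is attached to the last revision of the
# selection, as implemented above.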

templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
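# Illustrative template usage (the printed URL is hypothetical):
#
#     $ hg log -r . -T '{phabreview.url}\n'
#     https://phab.example.com/D123
#
# When the commit message carries no "Differential Revision" line, the
# keyword falls back to a local tag matching _differentialrevisiontagre, as
# handled in the else branch above.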