phabricator: include branch in the diffproperty metadata...
Ian Moody
r42384:d49ab47b default
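In short, this change makes phabsend's writediffproperties() record the changeset's branch in both the ``hg:meta`` and ``local:commits`` diff properties it attaches via differential.setdiffproperty, presumably so consumers of that metadata can recover the branch losslessly. A minimal sketch of the two payloads, assuming a changectx-like object ``ctx``; the helper name below is hypothetical and not part of this changeset:

# Illustrative sketch only: `example_branch_properties` is a hypothetical
# helper, not code from this changeset. It mirrors the two dicts that
# writediffproperties() serializes with templatefilters.json().
from mercurial.utils import stringutil

def example_branch_properties(ctx):
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),   # added by this change
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'branch': ctx.branch(),   # added by this change
        },
    }
    return hgmeta, localcommits

The full diff follows; the two added lines are marked with ``+``.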
@@ -1,1033 +1,1035 @@
 # phabricator.py - simple Phabricator integration
 #
 # Copyright 2017 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 """simple Phabricator integration (EXPERIMENTAL)
 
 This extension provides a ``phabsend`` command which sends a stack of
 changesets to Phabricator, and a ``phabread`` command which prints a stack of
 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
 to update statuses in batch.
 
 By default, Phabricator requires ``Test Plan`` which might prevent some
 changeset from being sent. The requirement could be disabled by changing
 ``differential.require-test-plan-field`` config server side.
 
 Config::
 
     [phabricator]
     # Phabricator URL
     url = https://phab.example.com/
 
     # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
     # callsign is "FOO".
     callsign = FOO
 
     # curl command to use. If not set (default), use builtin HTTP library to
     # communicate. If set, use the specified curl command. This could be useful
     # if you need to specify advanced options that is not easily supported by
     # the internal library.
     curlcmd = curl --connect-timeout 2 --retry 3 --silent
 
     [auth]
     example.schemes = https
     example.prefix = phab.example.com
 
     # API token. Get it from https://$HOST/conduit/login/
     example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
 """
 
 from __future__ import absolute_import
 
 import contextlib
 import itertools
 import json
 import operator
 import re
 
 from mercurial.node import bin, nullid
 from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     context,
     encoding,
     error,
     httpconnection as httpconnectionmod,
     mdiff,
     obsutil,
     parser,
     patch,
     phases,
     pycompat,
     registrar,
     scmutil,
     smartset,
     tags,
     templatefilters,
     templateutil,
     url as urlmod,
     util,
 )
 from mercurial.utils import (
     procutil,
     stringutil,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 # developer config: phabricator.batchsize
 configitem(b'phabricator', b'batchsize',
            default=12,
 )
 configitem(b'phabricator', b'callsign',
            default=None,
 )
 configitem(b'phabricator', b'curlcmd',
            default=None,
 )
 # developer config: phabricator.repophid
 configitem(b'phabricator', b'repophid',
            default=None,
 )
 configitem(b'phabricator', b'url',
            default=None,
 )
 configitem(b'phabsend', b'confirm',
            default=False,
 )
 
 colortable = {
     b'phabricator.action.created': b'green',
     b'phabricator.action.skipped': b'magenta',
     b'phabricator.action.updated': b'magenta',
     b'phabricator.desc': b'',
     b'phabricator.drev': b'bold',
     b'phabricator.node': b'',
 }
 
 _VCR_FLAGS = [
     (b'', b'test-vcr', b'',
      _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
        b', otherwise will mock all http requests using the specified vcr file.'
        b' (ADVANCED)'
      )),
 ]
 
 def vcrcommand(name, flags, spec, helpcategory=None):
     fullflags = flags + _VCR_FLAGS
     def decorate(fn):
         def inner(*args, **kwargs):
             cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
             if cassette:
                 import hgdemandimport
                 with hgdemandimport.deactivated():
                     import vcr as vcrmod
                     import vcr.stubs as stubs
                     vcr = vcrmod.VCR(
                         serializer=r'json',
                         custom_patches=[
                             (urlmod, r'httpconnection',
                              stubs.VCRHTTPConnection),
                             (urlmod, r'httpsconnection',
                              stubs.VCRHTTPSConnection),
                         ])
                     with vcr.use_cassette(cassette):
                         return fn(*args, **kwargs)
             return fn(*args, **kwargs)
         inner.__name__ = fn.__name__
         inner.__doc__ = fn.__doc__
         return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
     return decorate
 
 def urlencodenested(params):
     """like urlencode, but works with nested parameters.
 
     For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
     flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
     urlencode. Note: the encoding is consistent with PHP's http_build_query.
     """
     flatparams = util.sortdict()
     def process(prefix, obj):
         if isinstance(obj, bool):
             obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
         lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
         items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
         if items is None:
             flatparams[prefix] = obj
         else:
             for k, v in items(obj):
                 if prefix:
                     process(b'%s[%s]' % (prefix, k), v)
                 else:
                     process(k, v)
     process(b'', params)
     return util.urlreq.urlencode(flatparams)
 
 def readurltoken(repo):
     """return conduit url, token and make sure they exist
 
     Currently read from [auth] config section. In the future, it might
     make sense to read from .arcconfig and .arcrc as well.
     """
     url = repo.ui.config(b'phabricator', b'url')
     if not url:
         raise error.Abort(_(b'config %s.%s is required')
                           % (b'phabricator', b'url'))
 
     res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
     token = None
 
     if res:
         group, auth = res
 
         repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
 
         token = auth.get(b'phabtoken')
 
     if not token:
         raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                           % (url,))
 
     return url, token
 
 def callconduit(repo, name, params):
     """call Conduit API, params is a dict. return json.loads result, or None"""
     host, token = readurltoken(repo)
     url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
     repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
     params = params.copy()
     params[b'api.token'] = token
     data = urlencodenested(params)
     curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
     if curlcmd:
         sin, sout = procutil.popen2(b'%s -d @- %s'
                                     % (curlcmd, procutil.shellquote(url)))
         sin.write(data)
         sin.close()
         body = sout.read()
     else:
         urlopener = urlmod.opener(repo.ui, authinfo)
         request = util.urlreq.request(pycompat.strurl(url), data=data)
         with contextlib.closing(urlopener.open(request)) as rsp:
             body = rsp.read()
     repo.ui.debug(b'Conduit Response: %s\n' % body)
     parsed = pycompat.rapply(
         lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
         else x,
         json.loads(body)
     )
     if parsed.get(b'error_code'):
         msg = (_(b'Conduit Error (%s): %s')
                % (parsed[b'error_code'], parsed[b'error_info']))
         raise error.Abort(msg)
     return parsed[b'result']
 
 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
 def debugcallconduit(ui, repo, name):
     """call Conduit API
 
     Call parameters are read from stdin as a JSON blob. Result will be written
     to stdout as a JSON blob.
     """
     # json.loads only accepts bytes from 3.6+
     rawparams = encoding.unifromlocal(ui.fin.read())
     # json.loads only returns unicode strings
     params = pycompat.rapply(lambda x:
         encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
         json.loads(rawparams)
     )
     # json.dumps only accepts unicode strings
     result = pycompat.rapply(lambda x:
         encoding.unifromlocal(x) if isinstance(x, bytes) else x,
         callconduit(repo, name, params)
     )
     s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
     ui.write(b'%s\n' % encoding.unitolocal(s))
 
 def getrepophid(repo):
     """given callsign, return repository PHID or None"""
     # developer config: phabricator.repophid
     repophid = repo.ui.config(b'phabricator', b'repophid')
     if repophid:
         return repophid
     callsign = repo.ui.config(b'phabricator', b'callsign')
     if not callsign:
         return None
     query = callconduit(repo, b'diffusion.repository.search',
                         {b'constraints': {b'callsigns': [callsign]}})
     if len(query[b'data']) == 0:
         return None
     repophid = query[b'data'][0][b'phid']
     repo.ui.setconfig(b'phabricator', b'repophid', repophid)
     return repophid
 
 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
 _differentialrevisiondescre = re.compile(
     br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
 
 def getoldnodedrevmap(repo, nodelist):
     """find previous nodes that has been sent to Phabricator
 
     return {node: (oldnode, Differential diff, Differential Revision ID)}
     for node in nodelist with known previous sent versions, or associated
     Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
     be ``None``.
 
     Examines commit messages like "Differential Revision:" to get the
     association information.
 
     If such commit message line is not found, examines all precursors and their
     tags. Tags with format like "D1234" are considered a match and the node
     with that tag, and the number after "D" (ex. 1234) will be returned.
 
     The ``old node``, if not None, is guaranteed to be the last diff of
     corresponding Differential Revision, and exist in the repo.
     """
     unfi = repo.unfiltered()
     nodemap = unfi.changelog.nodemap
 
     result = {} # {node: (oldnode?, lastdiff?, drev)}
     toconfirm = {} # {node: (force, {precnode}, drev)}
     for node in nodelist:
         ctx = unfi[node]
         # For tags like "D123", put them into "toconfirm" to verify later
         precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
         for n in precnodes:
             if n in nodemap:
                 for tag in unfi.nodetags(n):
                     m = _differentialrevisiontagre.match(tag)
                     if m:
                         toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                         continue
 
         # Check commit message
         m = _differentialrevisiondescre.search(ctx.description())
         if m:
             toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
 
     # Double check if tags are genuine by collecting all old nodes from
     # Phabricator, and expect precursors overlap with it.
     if toconfirm:
         drevs = [drev for force, precs, drev in toconfirm.values()]
         alldiffs = callconduit(unfi, b'differential.querydiffs',
                                {b'revisionIDs': drevs})
         getnode = lambda d: bin(
             getdiffmeta(d).get(b'node', b'')) or None
         for newnode, (force, precset, drev) in toconfirm.items():
             diffs = [d for d in alldiffs.values()
                      if int(d[b'revisionID']) == drev]
 
             # "precursors" as known by Phabricator
             phprecset = set(getnode(d) for d in diffs)
 
             # Ignore if precursors (Phabricator and local repo) do not overlap,
             # and force is not set (when commit message says nothing)
             if not force and not bool(phprecset & precset):
                 tagname = b'D%d' % drev
                 tags.tag(repo, tagname, nullid, message=None, user=None,
                          date=None, local=True)
                 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                                b'Differential history\n') % drev)
                 continue
 
             # Find the last node using Phabricator metadata, and make sure it
             # exists in the repo
             oldnode = lastdiff = None
             if diffs:
                 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                 oldnode = getnode(lastdiff)
                 if oldnode and oldnode not in nodemap:
                     oldnode = None
 
             result[newnode] = (oldnode, lastdiff, drev)
 
     return result
 
 def getdiff(ctx, diffopts):
     """plain-text diff without header (user, commit message, etc)"""
     output = util.stringio()
     for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                       None, opts=diffopts):
         output.write(chunk)
     return output.getvalue()
 
 def creatediff(ctx):
     """create a Differential Diff"""
     repo = ctx.repo()
     repophid = getrepophid(repo)
     # Create a "Differential Diff" via "differential.createrawdiff" API
     params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
     if repophid:
         params[b'repositoryPHID'] = repophid
     diff = callconduit(repo, b'differential.createrawdiff', params)
     if not diff:
         raise error.Abort(_(b'cannot create diff for %s') % ctx)
     return diff
 
 def writediffproperties(ctx, diff):
     """write metadata to diff so patches could be applied losslessly"""
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'hg:meta',
         b'data': templatefilters.json({
             b'user': ctx.user(),
             b'date': b'%d %d' % ctx.date(),
+            b'branch': ctx.branch(),
             b'node': ctx.hex(),
             b'parent': ctx.p1().hex(),
         }),
     }
     callconduit(ctx.repo(), b'differential.setdiffproperty', params)
 
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'local:commits',
         b'data': templatefilters.json({
             ctx.hex(): {
                 b'author': stringutil.person(ctx.user()),
                 b'authorEmail': stringutil.email(ctx.user()),
                 b'time': int(ctx.date()[0]),
+                b'branch': ctx.branch(),
             },
         }),
     }
     callconduit(ctx.repo(), b'differential.setdiffproperty', params)
 
 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                                olddiff=None, actions=None):
     """create or update a Differential Revision
 
     If revid is None, create a new Differential Revision, otherwise update
     revid. If parentrevid is not None, set it as a dependency.
 
     If oldnode is not None, check if the patch content (without commit message
     and metadata) has changed before creating another diff.
 
     If actions is not None, they will be appended to the transaction.
     """
     repo = ctx.repo()
     if oldnode:
         diffopts = mdiff.diffopts(git=True, context=32767)
         oldctx = repo.unfiltered()[oldnode]
         neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
     else:
         neednewdiff = True
 
     transactions = []
     if neednewdiff:
         diff = creatediff(ctx)
         transactions.append({b'type': b'update', b'value': diff[b'phid']})
     else:
         # Even if we don't need to upload a new diff because the patch content
         # does not change. We might still need to update its metadata so
         # pushers could know the correct node metadata.
         assert olddiff
         diff = olddiff
     writediffproperties(ctx, diff)
 
     # Use a temporary summary to set dependency. There might be better ways but
     # I cannot find them for now. But do not do that if we are updating an
     # existing revision (revid is not None) since that introduces visible
     # churns (someone edited "Summary" twice) on the web page.
     if parentrevid and revid is None:
         summary = b'Depends on D%d' % parentrevid
         transactions += [{b'type': b'summary', b'value': summary},
                          {b'type': b'summary', b'value': b' '}]
 
     if actions:
         transactions += actions
 
     # Parse commit message and update related fields.
     desc = ctx.description()
     info = callconduit(repo, b'differential.parsecommitmessage',
                        {b'corpus': desc})
     for k, v in info[b'fields'].items():
         if k in [b'title', b'summary', b'testPlan']:
             transactions.append({b'type': k, b'value': v})
 
     params = {b'transactions': transactions}
     if revid is not None:
         # Update an existing Differential Revision
         params[b'objectIdentifier'] = revid
 
     revision = callconduit(repo, b'differential.revision.edit', params)
     if not revision:
         raise error.Abort(_(b'cannot create revision for %s') % ctx)
 
     return revision, diff
 
 def userphids(repo, names):
     """convert user names to PHIDs"""
     names = [name.lower() for name in names]
     query = {b'constraints': {b'usernames': names}}
     result = callconduit(repo, b'user.search', query)
     # username not found is not an error of the API. So check if we have missed
     # some names here.
     data = result[b'data']
     resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
     unresolved = set(names) - resolved
     if unresolved:
         raise error.Abort(_(b'unknown username: %s')
                           % b' '.join(sorted(unresolved)))
     return [entry[b'phid'] for entry in data]
483
485
484 @vcrcommand(b'phabsend',
486 @vcrcommand(b'phabsend',
485 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
487 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
486 (b'', b'amend', True, _(b'update commit messages')),
488 (b'', b'amend', True, _(b'update commit messages')),
487 (b'', b'reviewer', [], _(b'specify reviewers')),
489 (b'', b'reviewer', [], _(b'specify reviewers')),
488 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
490 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
489 _(b'REV [OPTIONS]'),
491 _(b'REV [OPTIONS]'),
490 helpcategory=command.CATEGORY_IMPORT_EXPORT)
492 helpcategory=command.CATEGORY_IMPORT_EXPORT)
491 def phabsend(ui, repo, *revs, **opts):
493 def phabsend(ui, repo, *revs, **opts):
492 """upload changesets to Phabricator
494 """upload changesets to Phabricator
493
495
494 If there are multiple revisions specified, they will be send as a stack
496 If there are multiple revisions specified, they will be send as a stack
495 with a linear dependencies relationship using the order specified by the
497 with a linear dependencies relationship using the order specified by the
496 revset.
498 revset.
497
499
498 For the first time uploading changesets, local tags will be created to
500 For the first time uploading changesets, local tags will be created to
499 maintain the association. After the first time, phabsend will check
501 maintain the association. After the first time, phabsend will check
500 obsstore and tags information so it can figure out whether to update an
502 obsstore and tags information so it can figure out whether to update an
501 existing Differential Revision, or create a new one.
503 existing Differential Revision, or create a new one.
502
504
503 If --amend is set, update commit messages so they have the
505 If --amend is set, update commit messages so they have the
504 ``Differential Revision`` URL, remove related tags. This is similar to what
506 ``Differential Revision`` URL, remove related tags. This is similar to what
505 arcanist will do, and is more desired in author-push workflows. Otherwise,
507 arcanist will do, and is more desired in author-push workflows. Otherwise,
506 use local tags to record the ``Differential Revision`` association.
508 use local tags to record the ``Differential Revision`` association.
507
509
508 The --confirm option lets you confirm changesets before sending them. You
510 The --confirm option lets you confirm changesets before sending them. You
509 can also add following to your configuration file to make it default
511 can also add following to your configuration file to make it default
510 behaviour::
512 behaviour::
511
513
512 [phabsend]
514 [phabsend]
513 confirm = true
515 confirm = true
514
516
515 phabsend will check obsstore and the above association to decide whether to
517 phabsend will check obsstore and the above association to decide whether to
516 update an existing Differential Revision, or create a new one.
518 update an existing Differential Revision, or create a new one.
517 """
519 """
518 opts = pycompat.byteskwargs(opts)
520 opts = pycompat.byteskwargs(opts)
519 revs = list(revs) + opts.get(b'rev', [])
521 revs = list(revs) + opts.get(b'rev', [])
520 revs = scmutil.revrange(repo, revs)
522 revs = scmutil.revrange(repo, revs)
521
523
522 if not revs:
524 if not revs:
523 raise error.Abort(_(b'phabsend requires at least one changeset'))
525 raise error.Abort(_(b'phabsend requires at least one changeset'))
524 if opts.get(b'amend'):
526 if opts.get(b'amend'):
525 cmdutil.checkunfinished(repo)
527 cmdutil.checkunfinished(repo)
526
528
527 # {newnode: (oldnode, olddiff, olddrev}
529 # {newnode: (oldnode, olddiff, olddrev}
528 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
530 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
529
531
530 confirm = ui.configbool(b'phabsend', b'confirm')
532 confirm = ui.configbool(b'phabsend', b'confirm')
531 confirm |= bool(opts.get(b'confirm'))
533 confirm |= bool(opts.get(b'confirm'))
532 if confirm:
534 if confirm:
533 confirmed = _confirmbeforesend(repo, revs, oldmap)
535 confirmed = _confirmbeforesend(repo, revs, oldmap)
534 if not confirmed:
536 if not confirmed:
535 raise error.Abort(_(b'phabsend cancelled'))
537 raise error.Abort(_(b'phabsend cancelled'))
536
538
537 actions = []
539 actions = []
538 reviewers = opts.get(b'reviewer', [])
540 reviewers = opts.get(b'reviewer', [])
539 if reviewers:
541 if reviewers:
540 phids = userphids(repo, reviewers)
542 phids = userphids(repo, reviewers)
541 actions.append({b'type': b'reviewers.add', b'value': phids})
543 actions.append({b'type': b'reviewers.add', b'value': phids})
542
544
543 drevids = [] # [int]
545 drevids = [] # [int]
544 diffmap = {} # {newnode: diff}
546 diffmap = {} # {newnode: diff}
545
547
     # Send patches one by one so we know their Differential Revision IDs and
     # can provide dependency relationship
     lastrevid = None
     for rev in revs:
         ui.debug(b'sending rev %d\n' % rev)
         ctx = repo[rev]
 
         # Get Differential Revision ID
         oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
         if oldnode != ctx.node() or opts.get(b'amend'):
             # Create or update Differential Revision
             revision, diff = createdifferentialrevision(
                 ctx, revid, lastrevid, oldnode, olddiff, actions)
             diffmap[ctx.node()] = diff
             newrevid = int(revision[b'object'][b'id'])
             if revid:
                 action = b'updated'
             else:
                 action = b'created'
 
             # Create a local tag to note the association, if commit message
             # does not have it already
             m = _differentialrevisiondescre.search(ctx.description())
             if not m or int(m.group(r'id')) != newrevid:
                 tagname = b'D%d' % newrevid
                 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                          date=None, local=True)
         else:
             # Nothing changed. But still set "newrevid" so the next revision
             # could depend on this one.
             newrevid = revid
             action = b'skipped'
 
         actiondesc = ui.label(
             {b'created': _(b'created'),
              b'skipped': _(b'skipped'),
              b'updated': _(b'updated')}[action],
             b'phabricator.action.%s' % action)
         drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
         nodedesc = ui.label(bytes(ctx), b'phabricator.node')
         desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
         ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                              desc))
         drevids.append(newrevid)
         lastrevid = newrevid
 
     # Update commit messages and remove tags
     if opts.get(b'amend'):
         unfi = repo.unfiltered()
         drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
         with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
             wnode = unfi[b'.'].node()
             mapping = {} # {oldnode: [newnode]}
             for i, rev in enumerate(revs):
                 old = unfi[rev]
                 drevid = drevids[i]
                 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                 newdesc = getdescfromdrev(drev)
                 # Make sure commit message contain "Differential Revision"
                 if old.description() != newdesc:
                     if old.phase() == phases.public:
                         ui.warn(_("warning: not updating public commit %s\n")
                                 % scmutil.formatchangeid(old))
                         continue
                     parents = [
                         mapping.get(old.p1().node(), (old.p1(),))[0],
                         mapping.get(old.p2().node(), (old.p2(),))[0],
                     ]
                     new = context.metadataonlyctx(
                         repo, old, parents=parents, text=newdesc,
                         user=old.user(), date=old.date(), extra=old.extra())
 
                     newnode = new.commit()
 
                     mapping[old.node()] = [newnode]
                     # Update diff property
                     writediffproperties(unfi[newnode], diffmap[old.node()])
                 # Remove local tags since it's no longer necessary
                 tagname = b'D%d' % drevid
                 if tagname in repo.tags():
                     tags.tag(repo, tagname, nullid, message=None, user=None,
                              date=None, local=True)
             scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
             if wnode in mapping:
                 unfi.setparents(mapping[wnode][0])
 
 # Map from "hg:meta" keys to header understood by "hg import". The order is
 # consistent with "hg export" output.
 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                               (b'node', b'Node ID'), (b'parent', b'Parent ')])
 
 def _confirmbeforesend(repo, revs, oldmap):
     url, token = readurltoken(repo)
     ui = repo.ui
     for rev in revs:
         ctx = repo[rev]
         desc = ctx.description().splitlines()[0]
         oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
         if drevid:
             drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
         else:
             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
 
         ui.write(_(b'%s - %s: %s\n')
                  % (drevdesc,
                     ui.label(bytes(ctx), b'phabricator.node'),
                     ui.label(desc, b'phabricator.desc')))
 
     if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                          b'$$ &Yes $$ &No') % url):
         return False
 
     return True
 
 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                      b'abandoned'}
 
 def _getstatusname(drev):
     """get normalized status name from a Differential Revision"""
     return drev[b'statusName'].replace(b' ', b'').lower()
 
 # Small language to specify differential revisions. Support symbols: (), :X,
 # +, and -.
 
 _elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
     b'(': (12, None, (b'group', 1, b')'), None, None),
     b':': (8, None, (b'ancestors', 8), None, None),
     b'&': (5, None, None, (b'and_', 5), None),
     b'+': (4, None, None, (b'add', 4), None),
     b'-': (4, None, None, (b'sub', 4), None),
     b')': (0, None, None, None, None),
     b'symbol': (0, b'symbol', None, None, None),
     b'end': (0, None, None, None, None),
 }
 
 def _tokenize(text):
     view = memoryview(text) # zero-copy slice
     special = b'():+-& '
     pos = 0
     length = len(text)
     while pos < length:
         symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                               pycompat.iterbytestr(view[pos:])))
         if symbol:
             yield (b'symbol', symbol, pos)
             pos += len(symbol)
         else: # special char, ignore space
             if text[pos] != b' ':
                 yield (text[pos], None, pos)
             pos += 1
     yield (b'end', None, pos)
 
 def _parse(text):
     tree, pos = parser.parser(_elements).parse(_tokenize(text))
     if pos != len(text):
         raise error.ParseError(b'invalid token', pos)
     return tree
 
 def _parsedrev(symbol):
     """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
     if symbol.startswith(b'D') and symbol[1:].isdigit():
         return int(symbol[1:])
     if symbol.isdigit():
         return int(symbol)
 
 def _prefetchdrevs(tree):
     """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
     drevs = set()
     ancestordrevs = set()
     op = tree[0]
     if op == b'symbol':
         r = _parsedrev(tree[1])
         if r:
             drevs.add(r)
     elif op == b'ancestors':
         r, a = _prefetchdrevs(tree[1])
         drevs.update(r)
         ancestordrevs.update(r)
         ancestordrevs.update(a)
     else:
         for t in tree[1:]:
             r, a = _prefetchdrevs(t)
             drevs.update(r)
             ancestordrevs.update(a)
     return drevs, ancestordrevs
 
 def querydrev(repo, spec):
     """return a list of "Differential Revision" dicts
 
     spec is a string using a simple query language, see docstring in phabread
     for details.
 
     A "Differential Revision dict" looks like:
 
         {
             "id": "2",
             "phid": "PHID-DREV-672qvysjcczopag46qty",
             "title": "example",
             "uri": "https://phab.example.com/D2",
             "dateCreated": "1499181406",
             "dateModified": "1499182103",
             "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
             "status": "0",
             "statusName": "Needs Review",
             "properties": [],
             "branch": null,
             "summary": "",
             "testPlan": "",
             "lineCount": "2",
             "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
             "diffs": [
               "3",
               "4",
             ],
             "commits": [],
             "reviewers": [],
             "ccs": [],
             "hashes": [],
             "auxiliary": {
               "phabricator:projects": [],
               "phabricator:depends-on": [
                 "PHID-DREV-gbapp366kutjebt7agcd"
               ]
             },
             "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
             "sourcePath": null
         }
     """
775 def fetch(params):
777 def fetch(params):
776 """params -> single drev or None"""
778 """params -> single drev or None"""
777 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
779 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
778 if key in prefetched:
780 if key in prefetched:
779 return prefetched[key]
781 return prefetched[key]
780 drevs = callconduit(repo, b'differential.query', params)
782 drevs = callconduit(repo, b'differential.query', params)
781 # Fill prefetched with the result
783 # Fill prefetched with the result
782 for drev in drevs:
784 for drev in drevs:
783 prefetched[drev[b'phid']] = drev
785 prefetched[drev[b'phid']] = drev
784 prefetched[int(drev[b'id'])] = drev
786 prefetched[int(drev[b'id'])] = drev
785 if key not in prefetched:
787 if key not in prefetched:
786 raise error.Abort(_(b'cannot get Differential Revision %r')
788 raise error.Abort(_(b'cannot get Differential Revision %r')
787 % params)
789 % params)
788 return prefetched[key]
790 return prefetched[key]
789
791
790 def getstack(topdrevids):
792 def getstack(topdrevids):
791 """given a top, get a stack from the bottom, [id] -> [id]"""
793 """given a top, get a stack from the bottom, [id] -> [id]"""
792 visited = set()
794 visited = set()
793 result = []
795 result = []
794 queue = [{b'ids': [i]} for i in topdrevids]
796 queue = [{b'ids': [i]} for i in topdrevids]
795 while queue:
797 while queue:
796 params = queue.pop()
798 params = queue.pop()
797 drev = fetch(params)
799 drev = fetch(params)
798 if drev[b'id'] in visited:
800 if drev[b'id'] in visited:
799 continue
801 continue
800 visited.add(drev[b'id'])
802 visited.add(drev[b'id'])
801 result.append(int(drev[b'id']))
803 result.append(int(drev[b'id']))
802 auxiliary = drev.get(b'auxiliary', {})
804 auxiliary = drev.get(b'auxiliary', {})
803 depends = auxiliary.get(b'phabricator:depends-on', [])
805 depends = auxiliary.get(b'phabricator:depends-on', [])
804 for phid in depends:
806 for phid in depends:
805 queue.append({b'phids': [phid]})
807 queue.append({b'phids': [phid]})
806 result.reverse()
808 result.reverse()
807 return smartset.baseset(result)
809 return smartset.baseset(result)
808
810
809 # Initialize prefetch cache
811 # Initialize prefetch cache
810 prefetched = {} # {id or phid: drev}
812 prefetched = {} # {id or phid: drev}
811
813
812 tree = _parse(spec)
814 tree = _parse(spec)
813 drevs, ancestordrevs = _prefetchdrevs(tree)
815 drevs, ancestordrevs = _prefetchdrevs(tree)
814
816
815 # developer config: phabricator.batchsize
817 # developer config: phabricator.batchsize
816 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
818 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
817
819
818 # Prefetch Differential Revisions in batch
820 # Prefetch Differential Revisions in batch
819 tofetch = set(drevs)
821 tofetch = set(drevs)
820 for r in ancestordrevs:
822 for r in ancestordrevs:
821 tofetch.update(range(max(1, r - batchsize), r + 1))
823 tofetch.update(range(max(1, r - batchsize), r + 1))
822 if drevs:
824 if drevs:
823 fetch({b'ids': list(tofetch)})
825 fetch({b'ids': list(tofetch)})
824 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
826 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
825
827
826 # Walk through the tree, return smartsets
828 # Walk through the tree, return smartsets
827 def walk(tree):
829 def walk(tree):
828 op = tree[0]
830 op = tree[0]
829 if op == b'symbol':
831 if op == b'symbol':
830 drev = _parsedrev(tree[1])
832 drev = _parsedrev(tree[1])
831 if drev:
833 if drev:
832 return smartset.baseset([drev])
834 return smartset.baseset([drev])
833 elif tree[1] in _knownstatusnames:
835 elif tree[1] in _knownstatusnames:
834 drevs = [r for r in validids
836 drevs = [r for r in validids
835 if _getstatusname(prefetched[r]) == tree[1]]
837 if _getstatusname(prefetched[r]) == tree[1]]
836 return smartset.baseset(drevs)
838 return smartset.baseset(drevs)
837 else:
839 else:
838 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
840 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
839 elif op in {b'and_', b'add', b'sub'}:
841 elif op in {b'and_', b'add', b'sub'}:
840 assert len(tree) == 3
842 assert len(tree) == 3
841 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
843 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
842 elif op == b'group':
844 elif op == b'group':
843 return walk(tree[1])
845 return walk(tree[1])
844 elif op == b'ancestors':
846 elif op == b'ancestors':
845 return getstack(walk(tree[1]))
847 return getstack(walk(tree[1]))
846 else:
848 else:
847 raise error.ProgrammingError(b'illegal tree: %r' % tree)
849 raise error.ProgrammingError(b'illegal tree: %r' % tree)
848
850
849 return [prefetched[r] for r in walk(tree)]
851 return [prefetched[r] for r in walk(tree)]
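
# How the operators resolve, in brief (assuming smartset.baseset supports &,
# + and -, which operator.and_/add/sub invoke): a spec such as
# ``:D6+8-(2+D4)`` reads as "ancestors of D6, plus D8, minus (D2 plus D4)",
# with the ``ancestors`` nodes expanded through getstack() above.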
850
852
851 def getdescfromdrev(drev):
853 def getdescfromdrev(drev):
852 """get description (commit message) from "Differential Revision"
854 """get description (commit message) from "Differential Revision"
853
855
854 This is similar to the differential.getcommitmessage API, but we only care
856 This is similar to the differential.getcommitmessage API, but we only care
855 about a limited set of fields: title, summary, test plan, and URL.
857 about a limited set of fields: title, summary, test plan, and URL.
856 """
858 """
857 title = drev[b'title']
859 title = drev[b'title']
858 summary = drev[b'summary'].rstrip()
860 summary = drev[b'summary'].rstrip()
859 testplan = drev[b'testPlan'].rstrip()
861 testplan = drev[b'testPlan'].rstrip()
860 if testplan:
862 if testplan:
861 testplan = b'Test Plan:\n%s' % testplan
863 testplan = b'Test Plan:\n%s' % testplan
862 uri = b'Differential Revision: %s' % drev[b'uri']
864 uri = b'Differential Revision: %s' % drev[b'uri']
863 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
865 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
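
# A minimal sketch of the assembled message (field values are made up):
#   drev = {b'title': b'phab: fix foo', b'summary': b'Handle bar.',
#           b'testPlan': b'', b'uri': b'https://phab.example.com/D123'}
#   getdescfromdrev(drev) == (b'phab: fix foo\n\nHandle bar.\n\n'
#                             b'Differential Revision: '
#                             b'https://phab.example.com/D123')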
864
866
865 def getdiffmeta(diff):
867 def getdiffmeta(diff):
866 """get commit metadata (date, node, user, p1) from a diff object
868 """get commit metadata (date, node, user, p1) from a diff object
867
869
868 The metadata could be "hg:meta", sent by phabsend, like:
870 The metadata could be "hg:meta", sent by phabsend, like:
869
871
870 "properties": {
872 "properties": {
871 "hg:meta": {
873 "hg:meta": {
872 "date": "1499571514 25200",
874 "date": "1499571514 25200",
873 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
875 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
874 "user": "Foo Bar <foo@example.com>",
876 "user": "Foo Bar <foo@example.com>",
875 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
877 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
876 }
878 }
877 }
879 }
878
880
879 Or converted from "local:commits", sent by "arc", like:
881 Or converted from "local:commits", sent by "arc", like:
880
882
881 "properties": {
883 "properties": {
882 "local:commits": {
884 "local:commits": {
883 "98c08acae292b2faf60a279b4189beb6cff1414d": {
885 "98c08acae292b2faf60a279b4189beb6cff1414d": {
884 "author": "Foo Bar",
886 "author": "Foo Bar",
885 "time": 1499546314,
887 "time": 1499546314,
886 "branch": "default",
888 "branch": "default",
887 "tag": "",
889 "tag": "",
888 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
890 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
889 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
891 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
890 "local": "1000",
892 "local": "1000",
891 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
893 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
892 "summary": "...",
894 "summary": "...",
893 "message": "...",
895 "message": "...",
894 "authorEmail": "foo@example.com"
896 "authorEmail": "foo@example.com"
895 }
897 }
896 }
898 }
897 }
899 }
898
900
899 Note: metadata extracted from "local:commits" will lose time zone
901 Note: metadata extracted from "local:commits" will lose time zone
900 information.
902 information.
901 """
903 """
902 props = diff.get(b'properties') or {}
904 props = diff.get(b'properties') or {}
903 meta = props.get(b'hg:meta')
905 meta = props.get(b'hg:meta')
904 if not meta and props.get(b'local:commits'):
906 if not meta and props.get(b'local:commits'):
905 commit = sorted(props[b'local:commits'].values())[0]
907 commit = sorted(props[b'local:commits'].values())[0]
906 meta = {
908 meta = {
907 b'date': b'%d 0' % commit[b'time'],
909 b'date': b'%d 0' % commit[b'time'],
908 b'node': commit[b'rev'],
910 b'node': commit[b'rev'],
909 b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
911 b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']),
910 }
912 }
911 if len(commit.get(b'parents', ())) >= 1:
913 if len(commit.get(b'parents', ())) >= 1:
912 meta[b'parent'] = commit[b'parents'][0]
914 meta[b'parent'] = commit[b'parents'][0]
913 return meta or {}
915 return meta or {}
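
# Sketch, reusing the "local:commits" example from the docstring above: with
# no "hg:meta" present, getdiffmeta() falls back to the first local commit
# and returns roughly
#   {b'date': b'1499546314 0',   # note the lost time zone
#    b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#    b'user': b'Foo Bar <foo@example.com>',
#    b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}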
914
916
915 def readpatch(repo, drevs, write):
917 def readpatch(repo, drevs, write):
916 """generate plain-text patch readable by 'hg import'
918 """generate plain-text patch readable by 'hg import'
917
919
918 write is usually ui.write. drevs is what "querydrev" returns, i.e. the
920 write is usually ui.write. drevs is what "querydrev" returns, i.e. the
919 results of "differential.query".
921 results of "differential.query".
920 """
922 """
921 # Prefetch hg:meta property for all diffs
923 # Prefetch hg:meta property for all diffs
922 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
924 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
923 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
925 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
924
926
925 # Generate patch for each drev
927 # Generate patch for each drev
926 for drev in drevs:
928 for drev in drevs:
927 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
929 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
928
930
929 diffid = max(int(v) for v in drev[b'diffs'])
931 diffid = max(int(v) for v in drev[b'diffs'])
930 body = callconduit(repo, b'differential.getrawdiff',
932 body = callconduit(repo, b'differential.getrawdiff',
931 {b'diffID': diffid})
933 {b'diffID': diffid})
932 desc = getdescfromdrev(drev)
934 desc = getdescfromdrev(drev)
933 header = b'# HG changeset patch\n'
935 header = b'# HG changeset patch\n'
934
936
935 # Try to preserve metadata from hg:meta property. Write hg patch
937 # Try to preserve metadata from hg:meta property. Write hg patch
936 # headers that can be read by the "import" command. See patchheadermap
938 # headers that can be read by the "import" command. See patchheadermap
937 # and extract in mercurial/patch.py for supported headers.
939 # and extract in mercurial/patch.py for supported headers.
938 meta = getdiffmeta(diffs[b'%d' % diffid])
940 meta = getdiffmeta(diffs[b'%d' % diffid])
939 for k in _metanamemap.keys():
941 for k in _metanamemap.keys():
940 if k in meta:
942 if k in meta:
941 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
943 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
942
944
943 content = b'%s%s\n%s' % (header, desc, body)
945 content = b'%s%s\n%s' % (header, desc, body)
944 write(content)
946 write(content)
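
# Sketch of one emitted patch (assuming _metanamemap turns keys like b'user'
# and b'node' into the usual "User"/"Node ID" hg patch headers, and reusing
# the hg:meta example from getdiffmeta above):
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev>
#   <raw diff body from differential.getrawdiff>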
945
947
946 @vcrcommand(b'phabread',
948 @vcrcommand(b'phabread',
947 [(b'', b'stack', False, _(b'read dependencies'))],
949 [(b'', b'stack', False, _(b'read dependencies'))],
948 _(b'DREVSPEC [OPTIONS]'),
950 _(b'DREVSPEC [OPTIONS]'),
949 helpcategory=command.CATEGORY_IMPORT_EXPORT)
951 helpcategory=command.CATEGORY_IMPORT_EXPORT)
950 def phabread(ui, repo, spec, **opts):
952 def phabread(ui, repo, spec, **opts):
951 """print patches from Phabricator suitable for importing
953 """print patches from Phabricator suitable for importing
952
954
955 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
957 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
954 the number ``123``. It could also have common operators like ``+``, ``-``,
956 the number ``123``. It could also have common operators like ``+``, ``-``,
955 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
957 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
956 select a stack.
958 select a stack.
957
959
958 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
960 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
959 could be used to filter patches by status. For performance reasons, they
961 could be used to filter patches by status. For performance reasons, they
960 only represent a subset of non-status selections and cannot be used alone.
962 only represent a subset of non-status selections and cannot be used alone.
961
963
962 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
964 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
963 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
965 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
964 stack up to D9.
966 stack up to D9.
965
967
966 If --stack is given, follow dependency information and read all patches.
968 If --stack is given, follow dependency information and read all patches.
967 It is equivalent to the ``:`` operator.
969 It is equivalent to the ``:`` operator.
968 """
970 """
969 opts = pycompat.byteskwargs(opts)
971 opts = pycompat.byteskwargs(opts)
970 if opts.get(b'stack'):
972 if opts.get(b'stack'):
971 spec = b':(%s)' % spec
973 spec = b':(%s)' % spec
972 drevs = querydrev(repo, spec)
974 drevs = querydrev(repo, spec)
973 readpatch(repo, drevs, ui.write)
975 readpatch(repo, drevs, ui.write)
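
# Example invocations, following the DREVSPEC syntax described in the
# docstring (revision numbers are placeholders):
#   hg phabread D123                   # print a single revision
#   hg phabread --stack D123           # same as 'hg phabread :D123'
#   hg phabread ':D9 & needsreview'    # "Needs Review" revisions up to D9
# The output is suitable for piping into 'hg import -'.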
974
976
975 @vcrcommand(b'phabupdate',
977 @vcrcommand(b'phabupdate',
976 [(b'', b'accept', False, _(b'accept revisions')),
978 [(b'', b'accept', False, _(b'accept revisions')),
977 (b'', b'reject', False, _(b'reject revisions')),
979 (b'', b'reject', False, _(b'reject revisions')),
978 (b'', b'abandon', False, _(b'abandon revisions')),
980 (b'', b'abandon', False, _(b'abandon revisions')),
979 (b'', b'reclaim', False, _(b'reclaim revisions')),
981 (b'', b'reclaim', False, _(b'reclaim revisions')),
980 (b'm', b'comment', b'', _(b'comment on the last revision')),
982 (b'm', b'comment', b'', _(b'comment on the last revision')),
981 ], _(b'DREVSPEC [OPTIONS]'),
983 ], _(b'DREVSPEC [OPTIONS]'),
982 helpcategory=command.CATEGORY_IMPORT_EXPORT)
984 helpcategory=command.CATEGORY_IMPORT_EXPORT)
983 def phabupdate(ui, repo, spec, **opts):
985 def phabupdate(ui, repo, spec, **opts):
984 """update Differential Revision in batch
986 """update Differential Revision in batch
985
987
986 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
988 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
987 """
989 """
988 opts = pycompat.byteskwargs(opts)
990 opts = pycompat.byteskwargs(opts)
989 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
991 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
990 if len(flags) > 1:
992 if len(flags) > 1:
991 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
993 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
992
994
993 actions = []
995 actions = []
994 for f in flags:
996 for f in flags:
995 actions.append({b'type': f, b'value': b'true'})
997 actions.append({b'type': f, b'value': b'true'})
996
998
997 drevs = querydrev(repo, spec)
999 drevs = querydrev(repo, spec)
998 for i, drev in enumerate(drevs):
1000 for i, drev in enumerate(drevs):
999 if i + 1 == len(drevs) and opts.get(b'comment'):
1001 if i + 1 == len(drevs) and opts.get(b'comment'):
1000 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1002 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1001 if actions:
1003 if actions:
1002 params = {b'objectIdentifier': drev[b'phid'],
1004 params = {b'objectIdentifier': drev[b'phid'],
1003 b'transactions': actions}
1005 b'transactions': actions}
1004 callconduit(repo, b'differential.revision.edit', params)
1006 callconduit(repo, b'differential.revision.edit', params)
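
# Example invocations (flags as declared above; D123 is a placeholder):
#   hg phabupdate --accept D123 -m 'LGTM'    # accept D123 and comment on it
#   hg phabupdate --abandon ':D123'          # abandon the stack up to D123
# Only one of --accept/--reject/--abandon/--reclaim may be used per run.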
1005
1007
1006 templatekeyword = registrar.templatekeyword()
1008 templatekeyword = registrar.templatekeyword()
1007
1009
1008 @templatekeyword(b'phabreview', requires={b'ctx'})
1010 @templatekeyword(b'phabreview', requires={b'ctx'})
1009 def template_review(context, mapping):
1011 def template_review(context, mapping):
1010 """:phabreview: Object describing the review for this changeset.
1012 """:phabreview: Object describing the review for this changeset.
1011 Has attributes `url` and `id`.
1013 Has attributes `url` and `id`.
1012 """
1014 """
1013 ctx = context.resource(mapping, b'ctx')
1015 ctx = context.resource(mapping, b'ctx')
1014 m = _differentialrevisiondescre.search(ctx.description())
1016 m = _differentialrevisiondescre.search(ctx.description())
1015 if m:
1017 if m:
1016 return templateutil.hybriddict({
1018 return templateutil.hybriddict({
1017 b'url': m.group(r'url'),
1019 b'url': m.group(r'url'),
1018 b'id': b"D%s" % m.group(r'id'),
1020 b'id': b"D%s" % m.group(r'id'),
1019 })
1021 })
1020 else:
1022 else:
1021 tags = ctx.repo().nodetags(ctx.node())
1023 tags = ctx.repo().nodetags(ctx.node())
1022 for t in tags:
1024 for t in tags:
1023 if _differentialrevisiontagre.match(t):
1025 if _differentialrevisiontagre.match(t):
1024 url = ctx.repo().ui.config(b'phabricator', b'url')
1026 url = ctx.repo().ui.config(b'phabricator', b'url')
1025 if not url.endswith(b'/'):
1027 if not url.endswith(b'/'):
1026 url += b'/'
1028 url += b'/'
1027 url += t
1029 url += t
1028
1030
1029 return templateutil.hybriddict({
1031 return templateutil.hybriddict({
1030 b'url': url,
1032 b'url': url,
1031 b'id': t,
1033 b'id': t,
1032 })
1034 })
1033 return None
1035 return None
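
# Possible template usage for the keyword above (assuming the template
# engine's dotted member access on dicts; output depends on the configured
# phabricator.url):
#   hg log -r . -T '{phabreview.url}\n'
#   hg log -r . -T '{phabreview.id} {phabreview.url}\n'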