phabricator: don't assume the existence of properties of local:commits...
Ian Moody
r42385:6dd454e5 default
@@ -1,1035 +1,1038 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that are not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import contextlib
import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    httpconnection as httpconnectionmod,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    registrar,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]

def vcrcommand(name, flags, spec, helpcategory=None):
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, r'httpconnection',
                             stubs.VCRHTTPConnection),
                            (urlmod, r'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate

def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
    process(b'', params)
    return util.urlreq.urlencode(flatparams)

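# Illustrative sketch (not part of the original file): with the docstring's
# example input, the flattened parameters round-trip through urlencode roughly
# as follows (key order follows util.sortdict insertion order):
#
#   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
#   # -> 'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'   i.e. a[0]=b&a[1]=c&d[e]=f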
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token

def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
        else x,
        json.loads(body)
    )
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']

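# Illustrative usage sketch (not from the original file): the other helpers in
# this module drive the Conduit API through this wrapper, e.g. something like
#
#   drevs = callconduit(repo, b'differential.query', {b'ids': [123]})
#
# which POSTs the flattened parameters to <phabricator.url>/api/<name> and
# returns the decoded "result" payload, aborting on a Conduit error.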
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(lambda x:
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
        json.loads(rawparams)
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(lambda x:
        encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(repo, name, params)
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))

def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid

_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)

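# Illustrative examples (assumptions, not from the original file): the tag
# pattern matches local tags such as "D1234", while the description pattern
# matches trailer lines in commit messages such as
#
#   Differential Revision: https://phab.example.com/D1234
#
# capturing the URL and the numeric revision id ("1234").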
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previously sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If no such commit message line is found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

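# Illustrative sketch (assumption, not from the original file): for a changeset
# that was previously sent as D1234, the returned mapping has roughly the shape
#
#   {newnode: (oldnode, lastdiff, 1234)}
#
# where oldnode and lastdiff may be None if Phabricator has no usable metadata.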
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                      None, opts=diffopts):
        output.write(chunk)
    return output.getvalue()

def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff

def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json({
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json({
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'branch': ctx.branch(),
            },
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

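# Illustrative sketch (not from the original file): after these calls the diff
# carries two properties that getdiffmeta() later reads back, roughly
#
#   "hg:meta":       {"date": ..., "node": ..., "user": ..., "parent": ...}
#   "local:commits": {"<node hex>": {"author": ..., "authorEmail": ...,
#                                    "time": ..., "branch": ...}}
#
# which is why consumers should not assume every local:commits key is present;
# diffs uploaded by other tools may omit some of them (see the commit subject).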
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers can know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churn (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%d' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff

def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo, b'user.search', query)
    # A username that is not found is not an error of the API, so check
    # whether we have missed some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[b'phid'] for entry in data]

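# Illustrative usage sketch (assumption, not from the original file):
#
#   userphids(repo, [b'Alice', b'bob'])
#   # -> [b'PHID-USER-...', b'PHID-USER-...']
#
# Names are lower-cased before the lookup; an unknown username aborts.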
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependency relationship using the order specified by the
    revset.

    When uploading changesets for the first time, local tags will be created
    to maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove related tags. This is similar to
    what arcanist will do, and is more desirable in author-push workflows.
    Otherwise, use local tags to record the ``Differential Revision``
    association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide the dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure the commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since they are no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])

# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'node', b'Node ID'), (b'parent', b'Parent ')])

def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True

_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}

def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()

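# Illustrative example (not from the original file): a Differential Revision
# whose "statusName" is "Needs Review" normalizes to b'needsreview', one of
# the _knownstatusnames accepted by the query language below.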
# Small language to specify differential revisions. Supported symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}

def _tokenize(text):
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              pycompat.iterbytestr(view[pos:])))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)

def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree

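# Illustrative examples of the revision spec language (assumptions, not from
# the original file): "D123" or "123" selects a single Differential Revision,
# ":D123" selects D123 together with its dependency ancestors, and specs can
# be combined with "+", "-", "&" and grouped with parentheses, e.g.
# ":D123 - D42".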
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)

def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs

735 def querydrev(repo, spec):
735 def querydrev(repo, spec):
736 """return a list of "Differential Revision" dicts
736 """return a list of "Differential Revision" dicts
737
737
738 spec is a string using a simple query language, see docstring in phabread
738 spec is a string using a simple query language, see docstring in phabread
739 for details.
739 for details.
740
740
741 A "Differential Revision dict" looks like:
741 A "Differential Revision dict" looks like:
742
742
743 {
743 {
744 "id": "2",
744 "id": "2",
745 "phid": "PHID-DREV-672qvysjcczopag46qty",
745 "phid": "PHID-DREV-672qvysjcczopag46qty",
746 "title": "example",
746 "title": "example",
747 "uri": "https://phab.example.com/D2",
747 "uri": "https://phab.example.com/D2",
748 "dateCreated": "1499181406",
748 "dateCreated": "1499181406",
749 "dateModified": "1499182103",
749 "dateModified": "1499182103",
750 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
750 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
751 "status": "0",
751 "status": "0",
752 "statusName": "Needs Review",
752 "statusName": "Needs Review",
753 "properties": [],
753 "properties": [],
754 "branch": null,
754 "branch": null,
755 "summary": "",
755 "summary": "",
756 "testPlan": "",
756 "testPlan": "",
757 "lineCount": "2",
757 "lineCount": "2",
758 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
758 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
759 "diffs": [
759 "diffs": [
760 "3",
760 "3",
761 "4",
761 "4",
762 ],
762 ],
763 "commits": [],
763 "commits": [],
764 "reviewers": [],
764 "reviewers": [],
765 "ccs": [],
765 "ccs": [],
766 "hashes": [],
766 "hashes": [],
767 "auxiliary": {
767 "auxiliary": {
768 "phabricator:projects": [],
768 "phabricator:projects": [],
769 "phabricator:depends-on": [
769 "phabricator:depends-on": [
770 "PHID-DREV-gbapp366kutjebt7agcd"
770 "PHID-DREV-gbapp366kutjebt7agcd"
771 ]
771 ]
772 },
772 },
773 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
773 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
774 "sourcePath": null
774 "sourcePath": null
775 }
775 }
776 """
776 """
777 def fetch(params):
777 def fetch(params):
778 """params -> single drev or None"""
778 """params -> single drev or None"""
779 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
779 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
780 if key in prefetched:
780 if key in prefetched:
781 return prefetched[key]
781 return prefetched[key]
782 drevs = callconduit(repo, b'differential.query', params)
782 drevs = callconduit(repo, b'differential.query', params)
783 # Fill prefetched with the result
783 # Fill prefetched with the result
784 for drev in drevs:
784 for drev in drevs:
785 prefetched[drev[b'phid']] = drev
785 prefetched[drev[b'phid']] = drev
786 prefetched[int(drev[b'id'])] = drev
786 prefetched[int(drev[b'id'])] = drev
787 if key not in prefetched:
787 if key not in prefetched:
788 raise error.Abort(_(b'cannot get Differential Revision %r')
788 raise error.Abort(_(b'cannot get Differential Revision %r')
789 % params)
789 % params)
790 return prefetched[key]
790 return prefetched[key]
791
791
792 def getstack(topdrevids):
792 def getstack(topdrevids):
793 """given a top, get a stack from the bottom, [id] -> [id]"""
793 """given a top, get a stack from the bottom, [id] -> [id]"""
794 visited = set()
794 visited = set()
795 result = []
795 result = []
796 queue = [{b'ids': [i]} for i in topdrevids]
796 queue = [{b'ids': [i]} for i in topdrevids]
797 while queue:
797 while queue:
798 params = queue.pop()
798 params = queue.pop()
799 drev = fetch(params)
799 drev = fetch(params)
800 if drev[b'id'] in visited:
800 if drev[b'id'] in visited:
801 continue
801 continue
802 visited.add(drev[b'id'])
802 visited.add(drev[b'id'])
803 result.append(int(drev[b'id']))
803 result.append(int(drev[b'id']))
804 auxiliary = drev.get(b'auxiliary', {})
804 auxiliary = drev.get(b'auxiliary', {})
805 depends = auxiliary.get(b'phabricator:depends-on', [])
805 depends = auxiliary.get(b'phabricator:depends-on', [])
806 for phid in depends:
806 for phid in depends:
807 queue.append({b'phids': [phid]})
807 queue.append({b'phids': [phid]})
808 result.reverse()
808 result.reverse()
809 return smartset.baseset(result)
809 return smartset.baseset(result)
810
810
    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]

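# A minimal sketch (illustrative only, never called): how walk() above maps a
# parsed operator name onto smartset arithmetic through the operator module.
# The revision ids used here are made up for illustration.
def _sketchwalkoperators():
    stack = smartset.baseset([1, 2, 3, 4])   # e.g. what ":D4" resolves to
    extras = smartset.baseset([8])           # e.g. the bare symbol "8"
    excluded = smartset.baseset([2])         # e.g. the bare symbol "2"
    combined = operator.add(stack, extras)   # "+" becomes a union
    return operator.sub(combined, excluded)  # "-" becomes a difference
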
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

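# A minimal sketch (illustrative only, never called): what getdescfromdrev()
# assembles from a hypothetical "differential.query" result. Only the four
# fields read above matter; anything else in the drev dict is ignored.
def _sketchgetdescfromdrev():
    drev = {
        b'title': b'phabricator: add a new feature',
        b'summary': b'A longer explanation of the change.\n',
        b'testPlan': b'ran the test suite\n',
        b'uri': b'https://phab.example.com/D123',
    }
    # Returns the title, summary, "Test Plan:" block and
    # "Differential Revision: https://phab.example.com/D123" line, joined by
    # blank lines, with empty fields dropped by filter(None, ...).
    return getdescfromdrev(drev)
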
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {}
        if b'author' in commit and b'authorEmail' in commit:
            meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                          commit[b'authorEmail'])
        if b'time' in commit:
            meta[b'date'] = b'%d 0' % commit[b'time']
        if b'rev' in commit:
            meta[b'node'] = commit[b'rev']
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}

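# A minimal sketch (illustrative only, never called): getdiffmeta() applied to
# a hypothetical "local:commits" entry that lacks "author", "authorEmail" and
# "time". Because each key is checked individually above, the missing fields
# are simply skipped rather than raising KeyError.
def _sketchgetdiffmetasparse():
    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'rev': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    # Returns only what is available: {b'node': ..., b'parent': ...}
    return getdiffmeta(diff)
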
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

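# A minimal sketch (illustrative only, never called): the kind of header the
# loop above builds when the diff metadata carries user, date, node and
# parent. The metadata values are hypothetical.
def _sketchpatchheader():
    header = b'# HG changeset patch\n'
    meta = {
        b'user': b'Foo Bar <foo@example.com>',
        b'date': b'1499571514 25200',
        b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
        b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
    }
    for k in _metanamemap.keys():
        if k in meta:
            header += b'# %s %s\n' % (_metanamemap[k], meta[k])
    # header now contains lines such as "# User Foo Bar <foo@example.com>",
    # which "hg import" recognizes via patchheadermap.
    return header
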
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

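# Example invocations (hypothetical revision numbers), using the DREVSPEC
# syntax documented above:
#
#   hg phabread D123                   # a single revision
#   hg phabread --stack D123           # D123 plus everything it depends on
#   hg phabread ':D6+8-(2+D4)'         # stack up to D6, plus D8, minus D2/D4
#   hg phabread ':D9 & needsreview'    # only "Needs Review" ones up to D9
#
# The output can be piped to "hg import -" to apply the patches.
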
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

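# A sketch of the conduit payload the loop above sends for the last revision
# of "hg phabupdate --accept -m done DREVSPEC", with a hypothetical PHID:
#
#   {b'objectIdentifier': b'PHID-DREV-xxxxxxxxxxxxxxxxxxxx',
#    b'transactions': [{b'type': b'accept', b'value': b'true'},
#                      {b'type': b'comment', b'value': b'done'}]}
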
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
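# Example use of the keyword above (values are hypothetical):
#
#   $ hg log -r . -T '{phabreview.id} {phabreview.url}\n'
#   D123 https://phab.example.com/D123
#
# The pair comes from the "Differential Revision:" line that phabsend writes
# into the commit message, or, failing that, from a local "D123"-style tag.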