##// END OF EJS Templates
phabricator: include commit (node) and parent in the local:commits metadata...
Ian Moody -
r42441:9421d7e1 default
parent child Browse files
Show More
@@ -1,1041 +1,1043 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# Command and config registration tables, populated via the registrar
# decorators below.
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
    default=12,
)
configitem(b'phabricator', b'callsign',
    default=None,
)
configitem(b'phabricator', b'curlcmd',
    default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
    default=None,
)
configitem(b'phabricator', b'url',
    default=None,
)
configitem(b'phabsend', b'confirm',
    default=False,
)

# Labels used when writing colored output from the phab* commands.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag added to every command wrapped by vcrcommand(); used by the
# test suite to record/replay HTTP traffic.
_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]
127
127
def vcrcommand(name, flags, spec, helpcategory=None):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``command(name, flags, spec)`` but adds the hidden
    ``--test-vcr PATH`` flag (from ``_VCR_FLAGS``).  When that flag is
    given, the ``vcr`` package records to (or replays from) the cassette
    file at PATH by patching urlmod's HTTP connection classes; otherwise
    the wrapped function runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            # r'' key: **kwargs keys are native strings on both py2 and py3
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport
                # vcr performs dynamic imports that misbehave under
                # Mercurial's demandimport, so disable it while importing.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, r'httpconnection',
                             stubs.VCRHTTPConnection),
                            (urlmod, r'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        # Preserve identity/help text for the command dispatcher.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate
153
153
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flattened = util.sortdict()

    def flatten(prefix, value):
        # Python bools become PHP-style literal strings.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Only exact list/dict instances recurse; subclasses (and every
        # other type) are stored as-is, matching http_build_query.
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flattened[prefix] = value
            return
        for key, item in pairs:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flattened)
177
177
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    # Look up the [auth] group whose prefix/schemes match the conduit URL.
    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)

        # The API token lives in auth.<group>.phabtoken.
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token
204
204
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before mutating so the caller's dict is left untouched.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form-encoded body to the user-configured curl command
        # via stdin (-d @-).
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: builtin HTTP library with [auth] credentials.
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    # json.loads yields unicode strings; convert them all to local
    # (byte) strings, which the rest of this extension works with.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
        else x,
        json.loads(body)
    )
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']
236
236
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(lambda x:
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
        json.loads(rawparams)
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(lambda x:
        encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(repo, name, params)
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
258
258
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign through Conduit.
    searchresult = callconduit(repo, b'diffusion.repository.search',
                               {b'constraints': {b'callsigns': [callsign]}})
    data = searchresult[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # Cache the answer in the config for the rest of this process.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
275
275
# Matches local tag names of the form "D123" (a Differential Revision
# reference); group 1 is the numeric revision ID.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches "Differential Revision: <url ending in D<id>>" lines in commit
# messages; named groups: "url" (full link) and "id" (numeric revision ID).
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
279
279
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this only continues the inner tag
                        # loop, not the outer node loop, so later tags /
                        # the commit-message check can still overwrite the
                        # entry — confirm whether that is intended.
                        continue

        # Check commit message; force=1 means trust it even when
        # Phabricator's record of precursors does not overlap ours.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        # Extract the hg node recorded in a diff's metadata; missing or
        # empty hashes map to None.
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid effectively removes the stale local tag.
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
357
357
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(), None,
                          opts=diffopts)
    # diffui yields (chunk, label) pairs; labels are only for coloring.
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
365
365
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API.
    # context=32767 makes the diff carry effectively full file contents.
    rawdiff = getdiff(ctx, mdiff.diffopts(git=True, context=32767))
    params = {b'diff': rawdiff}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
378
378
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # "hg:meta": flat commit metadata used to reconstruct the changeset.
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json({
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)

    # "local:commits": per-commit metadata keyed by node, including the
    # commit hash and its parent so consumers need not parse hg:meta.
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json({
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'commit': ctx.hex(),
                b'parents': [ctx.p1().hex()],
                b'branch': ctx.branch(),
            },
        }),
    }
    callconduit(ctx.repo(), b'differential.setdiffproperty', params)
407
409
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair from the Conduit responses.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of the old and new nodes to decide
        # whether a new Differential Diff must be uploaded.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%d' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
470
472
def userphids(repo, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(repo, b'user.search',
                         {b'constraints': {b'usernames': wanted}})
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    found = set(entry[b'fields'][b'username'].lower() for entry in data)
    missing = set(wanted) - found
    if missing:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(missing)))
    return [entry[b'phid'] for entry in data]
485
487
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
633
635
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'node', b'Node ID'), (b'parent', b'Parent ')])
638
640
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per changeset and prompt before sending.

    Returns True when the user confirms, False when they decline.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True
661
663
662 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
664 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
663 b'abandoned'}
665 b'abandoned'}
664
666
665 def _getstatusname(drev):
667 def _getstatusname(drev):
666 """get normalized status name from a Differential Revision"""
668 """get normalized status name from a Differential Revision"""
667 return drev[b'statusName'].replace(b' ', b'').lower()
669 return drev[b'statusName'].replace(b' ', b'').lower()
668
670
669 # Small language to specify differential revisions. Support symbols: (), :X,
671 # Small language to specify differential revisions. Support symbols: (), :X,
670 # +, and -.
672 # +, and -.
671
673
672 _elements = {
674 _elements = {
673 # token-type: binding-strength, primary, prefix, infix, suffix
675 # token-type: binding-strength, primary, prefix, infix, suffix
674 b'(': (12, None, (b'group', 1, b')'), None, None),
676 b'(': (12, None, (b'group', 1, b')'), None, None),
675 b':': (8, None, (b'ancestors', 8), None, None),
677 b':': (8, None, (b'ancestors', 8), None, None),
676 b'&': (5, None, None, (b'and_', 5), None),
678 b'&': (5, None, None, (b'and_', 5), None),
677 b'+': (4, None, None, (b'add', 4), None),
679 b'+': (4, None, None, (b'add', 4), None),
678 b'-': (4, None, None, (b'sub', 4), None),
680 b'-': (4, None, None, (b'sub', 4), None),
679 b')': (0, None, None, None, None),
681 b')': (0, None, None, None, None),
680 b'symbol': (0, b'symbol', None, None, None),
682 b'symbol': (0, b'symbol', None, None, None),
681 b'end': (0, None, None, None, None),
683 b'end': (0, None, None, None, None),
682 }
684 }
683
685
def _tokenize(text):
    """Yield (token-type, value, position) triples for a DREVSPEC string.

    Symbols are maximal runs of non-special bytes; spaces are skipped; a
    final (b'end', None, pos) token terminates the stream.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              pycompat.iterbytestr(view[pos:])))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)
700
702
def _parse(text):
    """Parse a DREVSPEC string into a tree using the _elements grammar.

    Raises error.ParseError if trailing input remains after parsing.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
706
708
707 def _parsedrev(symbol):
709 def _parsedrev(symbol):
708 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
710 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
709 if symbol.startswith(b'D') and symbol[1:].isdigit():
711 if symbol.startswith(b'D') and symbol[1:].isdigit():
710 return int(symbol[1:])
712 return int(symbol[1:])
711 if symbol.isdigit():
713 if symbol.isdigit():
712 return int(symbol)
714 return int(symbol)
713
715
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        # the ':D123' operand itself counts both as a direct id and as an
        # ancestor root whose stack must be fetched
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
734
736
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
852
854
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    # empty sections are dropped so there are no doubled blank lines
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
866
868
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta and props.get(b'local:commits'):
        # "local:commits" maps node -> info; take the first entry in sorted
        # order when hg:meta is absent
        commit = sorted(props[b'local:commits'].values())[0]
        meta = {}
        if b'author' in commit and b'authorEmail' in commit:
            meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                          commit[b'authorEmail'])
        if b'time' in commit:
            # time zone is lost; assume UTC offset 0
            meta[b'date'] = b'%d 0' % commit[b'time']
        if b'branch' in commit:
            meta[b'branch'] = commit[b'branch']
        node = commit.get(b'commit', commit.get(b'rev'))
        if node:
            meta[b'node'] = node
        if len(commit.get(b'parents', ())) >= 1:
            meta[b'parent'] = commit[b'parents'][0]
    return meta or {}
922
924
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        # the latest diff of a drev is the one with the highest id
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
953
955
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the ':' (stack) prefix operator.
    spec = b':(%s)' % spec if opts.get(b'stack') else spec
    readpatch(repo, querydrev(repo, spec), ui.write)
982
984
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive.
    flags = [name for name in (b'accept', b'reject', b'abandon', b'reclaim')
             if opts.get(name)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The optional comment is attached only to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(repo, b'differential.revision.edit',
                        {b'objectIdentifier': drev[b'phid'],
                         b'transactions': actions})
1013
1015
# Registrar collecting the template keywords defined below (e.g. phabreview).
templatekeyword = registrar.templatekeyword()
1015
1017
@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the Differential URL embedded in the commit description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict({
            b'url': match.group(r'url'),
            b'id': b"D%s" % match.group(r'id'),
        })
    # Otherwise look for a Differential-revision-style local tag on the node
    # and reconstruct the URL from the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        baseurl = ctx.repo().ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({
            b'url': baseurl + tag,
            b'id': tag,
        })
    return None
General Comments 0
You need to be logged in to leave comments. Login now