phabricator: include branch in the phabread output...
Ian Moody
r42443:231334c1 default
@@ -1,1052 +1,1053 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config on the server side.
16 ``differential.require-test-plan-field`` config on the server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use the builtin HTTP library to
28 # curl command to use. If not set (default), use the builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
82 testedwith = 'ships-with-hg-core'
83
83
84 cmdtable = {}
84 cmdtable = {}
85 command = registrar.command(cmdtable)
85 command = registrar.command(cmdtable)
86
86
87 configtable = {}
87 configtable = {}
88 configitem = registrar.configitem(configtable)
88 configitem = registrar.configitem(configtable)
89
89
90 # developer config: phabricator.batchsize
90 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
91 configitem(b'phabricator', b'batchsize',
92 default=12,
92 default=12,
93 )
93 )
94 configitem(b'phabricator', b'callsign',
94 configitem(b'phabricator', b'callsign',
95 default=None,
95 default=None,
96 )
96 )
97 configitem(b'phabricator', b'curlcmd',
97 configitem(b'phabricator', b'curlcmd',
98 default=None,
98 default=None,
99 )
99 )
100 # developer config: phabricator.repophid
100 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
101 configitem(b'phabricator', b'repophid',
102 default=None,
102 default=None,
103 )
103 )
104 configitem(b'phabricator', b'url',
104 configitem(b'phabricator', b'url',
105 default=None,
105 default=None,
106 )
106 )
107 configitem(b'phabsend', b'confirm',
107 configitem(b'phabsend', b'confirm',
108 default=False,
108 default=False,
109 )
109 )
110
110
111 colortable = {
111 colortable = {
112 b'phabricator.action.created': b'green',
112 b'phabricator.action.created': b'green',
113 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.updated': b'magenta',
114 b'phabricator.action.updated': b'magenta',
115 b'phabricator.desc': b'',
115 b'phabricator.desc': b'',
116 b'phabricator.drev': b'bold',
116 b'phabricator.drev': b'bold',
117 b'phabricator.node': b'',
117 b'phabricator.node': b'',
118 }
118 }
119
119
120 _VCR_FLAGS = [
120 _VCR_FLAGS = [
121 (b'', b'test-vcr', b'',
121 (b'', b'test-vcr', b'',
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 b', otherwise will mock all http requests using the specified vcr file.'
123 b', otherwise will mock all http requests using the specified vcr file.'
124 b' (ADVANCED)'
124 b' (ADVANCED)'
125 )),
125 )),
126 ]
126 ]
127
127
128 def vcrcommand(name, flags, spec, helpcategory=None):
128 def vcrcommand(name, flags, spec, helpcategory=None):
129 fullflags = flags + _VCR_FLAGS
129 fullflags = flags + _VCR_FLAGS
130 def decorate(fn):
130 def decorate(fn):
131 def inner(*args, **kwargs):
131 def inner(*args, **kwargs):
132 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
132 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
133 if cassette:
133 if cassette:
134 import hgdemandimport
134 import hgdemandimport
135 with hgdemandimport.deactivated():
135 with hgdemandimport.deactivated():
136 import vcr as vcrmod
136 import vcr as vcrmod
137 import vcr.stubs as stubs
137 import vcr.stubs as stubs
138 vcr = vcrmod.VCR(
138 vcr = vcrmod.VCR(
139 serializer=r'json',
139 serializer=r'json',
140 custom_patches=[
140 custom_patches=[
141 (urlmod, r'httpconnection',
141 (urlmod, r'httpconnection',
142 stubs.VCRHTTPConnection),
142 stubs.VCRHTTPConnection),
143 (urlmod, r'httpsconnection',
143 (urlmod, r'httpsconnection',
144 stubs.VCRHTTPSConnection),
144 stubs.VCRHTTPSConnection),
145 ])
145 ])
146 with vcr.use_cassette(cassette):
146 with vcr.use_cassette(cassette):
147 return fn(*args, **kwargs)
147 return fn(*args, **kwargs)
148 return fn(*args, **kwargs)
148 return fn(*args, **kwargs)
149 inner.__name__ = fn.__name__
149 inner.__name__ = fn.__name__
150 inner.__doc__ = fn.__doc__
150 inner.__doc__ = fn.__doc__
151 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
151 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
152 return decorate
152 return decorate
153
153
154 def urlencodenested(params):
154 def urlencodenested(params):
155 """like urlencode, but works with nested parameters.
155 """like urlencode, but works with nested parameters.
156
156
157 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
157 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
158 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
158 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
159 urlencode. Note: the encoding is consistent with PHP's http_build_query.
159 urlencode. Note: the encoding is consistent with PHP's http_build_query.
160 """
160 """
161 flatparams = util.sortdict()
161 flatparams = util.sortdict()
162 def process(prefix, obj):
162 def process(prefix, obj):
163 if isinstance(obj, bool):
163 if isinstance(obj, bool):
164 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
164 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
165 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
165 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
166 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
166 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
167 if items is None:
167 if items is None:
168 flatparams[prefix] = obj
168 flatparams[prefix] = obj
169 else:
169 else:
170 for k, v in items(obj):
170 for k, v in items(obj):
171 if prefix:
171 if prefix:
172 process(b'%s[%s]' % (prefix, k), v)
172 process(b'%s[%s]' % (prefix, k), v)
173 else:
173 else:
174 process(k, v)
174 process(k, v)
175 process(b'', params)
175 process(b'', params)
176 return util.urlreq.urlencode(flatparams)
176 return util.urlreq.urlencode(flatparams)
177
177
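The flattening above matches what Conduit (like PHP's http_build_query) expects. As a rough standalone sketch of the same idea using only the Python standard library (the helper name is ours, not part of the extension)::

    from urllib.parse import urlencode

    def flatten(params, prefix=''):
        """Flatten nested dicts/lists into PHP-style bracketed keys."""
        if isinstance(params, dict):
            items = params.items()
        elif isinstance(params, list):
            items = [(str(i), v) for i, v in enumerate(params)]
        else:
            return {prefix: params}
        flat = {}
        for k, v in items:
            key = '%s[%s]' % (prefix, k) if prefix else k
            flat.update(flatten(v, key))
        return flat

    # {'a': ['b', 'c'], 'd': {'e': 'f'}} flattens to
    # {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} before urlencoding
    flat = flatten({'a': ['b', 'c'], 'd': {'e': 'f'}})
    print(flat)
    # brackets end up percent-encoded, just as in the extension's own urlencode call
    print(urlencode(flat))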
178 def readurltoken(repo):
178 def readurltoken(repo):
179 """return conduit url, token and make sure they exist
179 """return conduit url, token and make sure they exist
180
180
181 Currently read from [auth] config section. In the future, it might
181 Currently read from [auth] config section. In the future, it might
182 make sense to read from .arcconfig and .arcrc as well.
182 make sense to read from .arcconfig and .arcrc as well.
183 """
183 """
184 url = repo.ui.config(b'phabricator', b'url')
184 url = repo.ui.config(b'phabricator', b'url')
185 if not url:
185 if not url:
186 raise error.Abort(_(b'config %s.%s is required')
186 raise error.Abort(_(b'config %s.%s is required')
187 % (b'phabricator', b'url'))
187 % (b'phabricator', b'url'))
188
188
189 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
189 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
190 token = None
190 token = None
191
191
192 if res:
192 if res:
193 group, auth = res
193 group, auth = res
194
194
195 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
195 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
196
196
197 token = auth.get(b'phabtoken')
197 token = auth.get(b'phabtoken')
198
198
199 if not token:
199 if not token:
200 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
200 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
201 % (url,))
201 % (url,))
202
202
203 return url, token
203 return url, token
204
204
205 def callconduit(repo, name, params):
205 def callconduit(repo, name, params):
206 """call Conduit API, params is a dict. return json.loads result, or None"""
206 """call Conduit API, params is a dict. return json.loads result, or None"""
207 host, token = readurltoken(repo)
207 host, token = readurltoken(repo)
208 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
208 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
209 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
209 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
210 params = params.copy()
210 params = params.copy()
211 params[b'api.token'] = token
211 params[b'api.token'] = token
212 data = urlencodenested(params)
212 data = urlencodenested(params)
213 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
213 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
214 if curlcmd:
214 if curlcmd:
215 sin, sout = procutil.popen2(b'%s -d @- %s'
215 sin, sout = procutil.popen2(b'%s -d @- %s'
216 % (curlcmd, procutil.shellquote(url)))
216 % (curlcmd, procutil.shellquote(url)))
217 sin.write(data)
217 sin.write(data)
218 sin.close()
218 sin.close()
219 body = sout.read()
219 body = sout.read()
220 else:
220 else:
221 urlopener = urlmod.opener(repo.ui, authinfo)
221 urlopener = urlmod.opener(repo.ui, authinfo)
222 request = util.urlreq.request(pycompat.strurl(url), data=data)
222 request = util.urlreq.request(pycompat.strurl(url), data=data)
223 with contextlib.closing(urlopener.open(request)) as rsp:
223 with contextlib.closing(urlopener.open(request)) as rsp:
224 body = rsp.read()
224 body = rsp.read()
225 repo.ui.debug(b'Conduit Response: %s\n' % body)
225 repo.ui.debug(b'Conduit Response: %s\n' % body)
226 parsed = pycompat.rapply(
226 parsed = pycompat.rapply(
227 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
227 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
228 else x,
228 else x,
229 json.loads(body)
229 json.loads(body)
230 )
230 )
231 if parsed.get(b'error_code'):
231 if parsed.get(b'error_code'):
232 msg = (_(b'Conduit Error (%s): %s')
232 msg = (_(b'Conduit Error (%s): %s')
233 % (parsed[b'error_code'], parsed[b'error_info']))
233 % (parsed[b'error_code'], parsed[b'error_info']))
234 raise error.Abort(msg)
234 raise error.Abort(msg)
235 return parsed[b'result']
235 return parsed[b'result']
236
236
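On the wire, a Conduit call is an HTTP POST of form-encoded parameters (including ``api.token``) to ``<url>/api/<method>``, answered with a JSON object carrying either ``result`` or ``error_code``/``error_info``, as handled above. A minimal standalone sketch with the standard library only; the host, token and method below are placeholders::

    import json
    from urllib.parse import urlencode
    from urllib.request import urlopen

    def conduit_call(host, token, method, params):
        """POST form-encoded params to a Conduit endpoint and decode the reply."""
        # Nested params would first need the flattening shown in urlencodenested().
        body = urlencode(dict(params, **{'api.token': token})).encode('ascii')
        with urlopen('%s/api/%s' % (host.rstrip('/'), method), data=body) as rsp:
            parsed = json.loads(rsp.read())
        if parsed.get('error_code'):
            raise RuntimeError('Conduit Error (%s): %s'
                               % (parsed['error_code'], parsed['error_info']))
        return parsed['result']

    # e.g. conduit_call('https://phab.example.com', 'cli-xxxx', 'conduit.ping', {})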
237 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
237 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
238 def debugcallconduit(ui, repo, name):
238 def debugcallconduit(ui, repo, name):
239 """call Conduit API
239 """call Conduit API
240
240
241 Call parameters are read from stdin as a JSON blob. Result will be written
241 Call parameters are read from stdin as a JSON blob. Result will be written
242 to stdout as a JSON blob.
242 to stdout as a JSON blob.
243 """
243 """
244 # json.loads only accepts bytes from 3.6+
244 # json.loads only accepts bytes from 3.6+
245 rawparams = encoding.unifromlocal(ui.fin.read())
245 rawparams = encoding.unifromlocal(ui.fin.read())
246 # json.loads only returns unicode strings
246 # json.loads only returns unicode strings
247 params = pycompat.rapply(lambda x:
247 params = pycompat.rapply(lambda x:
248 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
248 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
249 json.loads(rawparams)
249 json.loads(rawparams)
250 )
250 )
251 # json.dumps only accepts unicode strings
251 # json.dumps only accepts unicode strings
252 result = pycompat.rapply(lambda x:
252 result = pycompat.rapply(lambda x:
253 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
253 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
254 callconduit(repo, name, params)
254 callconduit(repo, name, params)
255 )
255 )
256 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
256 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
257 ui.write(b'%s\n' % encoding.unitolocal(s))
257 ui.write(b'%s\n' % encoding.unitolocal(s))
258
258
259 def getrepophid(repo):
259 def getrepophid(repo):
260 """given callsign, return repository PHID or None"""
260 """given callsign, return repository PHID or None"""
261 # developer config: phabricator.repophid
261 # developer config: phabricator.repophid
262 repophid = repo.ui.config(b'phabricator', b'repophid')
262 repophid = repo.ui.config(b'phabricator', b'repophid')
263 if repophid:
263 if repophid:
264 return repophid
264 return repophid
265 callsign = repo.ui.config(b'phabricator', b'callsign')
265 callsign = repo.ui.config(b'phabricator', b'callsign')
266 if not callsign:
266 if not callsign:
267 return None
267 return None
268 query = callconduit(repo, b'diffusion.repository.search',
268 query = callconduit(repo, b'diffusion.repository.search',
269 {b'constraints': {b'callsigns': [callsign]}})
269 {b'constraints': {b'callsigns': [callsign]}})
270 if len(query[b'data']) == 0:
270 if len(query[b'data']) == 0:
271 return None
271 return None
272 repophid = query[b'data'][0][b'phid']
272 repophid = query[b'data'][0][b'phid']
273 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
273 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
274 return repophid
274 return repophid
275
275
276 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
276 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
277 _differentialrevisiondescre = re.compile(
277 _differentialrevisiondescre = re.compile(
278 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
278 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
279
279
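To illustrate what these two patterns match, a small self-contained check (the tag, message and URL below are made-up samples; the host mirrors the docstring's phab.example.com)::

    import re

    _tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
    _descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)

    # Local tags such as b'D1234' mark a previously sent node.
    assert _tagre.match(b'D1234').group(1) == b'1234'
    assert _tagre.match(b'D0') is None          # IDs never start with 0
    assert _tagre.match(b'Draft') is None       # only digits may follow the D

    # The commit-message trailer added by --amend (or by arcanist).
    msg = b'some change\n\nDifferential Revision: https://phab.example.com/D1234'
    m = _descre.search(msg)
    assert m.group('id') == b'1234'
    assert m.group('url') == b'https://phab.example.com/D1234'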
280 def getoldnodedrevmap(repo, nodelist):
280 def getoldnodedrevmap(repo, nodelist):
281 """find previous nodes that has been sent to Phabricator
281 """find previous nodes that has been sent to Phabricator
282
282
283 return {node: (oldnode, Differential diff, Differential Revision ID)}
283 return {node: (oldnode, Differential diff, Differential Revision ID)}
284 for node in nodelist with known previously sent versions, or associated
284 for node in nodelist with known previously sent versions, or associated
285 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
285 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
286 be ``None``.
286 be ``None``.
287
287
288 Examines commit message lines like "Differential Revision:" to get the
288 Examines commit message lines like "Differential Revision:" to get the
289 association information.
289 association information.
290
290
291 If no such commit message line is found, examines all precursors and their
291 If no such commit message line is found, examines all precursors and their
292 tags. Tags in the form "D1234" are considered a match; the node carrying such
292 tags. Tags in the form "D1234" are considered a match; the node carrying such
293 a tag, and the number after "D" (e.g. 1234), will be returned.
293 a tag, and the number after "D" (e.g. 1234), will be returned.
294
294
295 The ``old node``, if not None, is guaranteed to be the node of the last diff
295 The ``old node``, if not None, is guaranteed to be the node of the last diff
296 of the corresponding Differential Revision, and to exist in the repo.
296 of the corresponding Differential Revision, and to exist in the repo.
297 """
297 """
298 unfi = repo.unfiltered()
298 unfi = repo.unfiltered()
299 nodemap = unfi.changelog.nodemap
299 nodemap = unfi.changelog.nodemap
300
300
301 result = {} # {node: (oldnode?, lastdiff?, drev)}
301 result = {} # {node: (oldnode?, lastdiff?, drev)}
302 toconfirm = {} # {node: (force, {precnode}, drev)}
302 toconfirm = {} # {node: (force, {precnode}, drev)}
303 for node in nodelist:
303 for node in nodelist:
304 ctx = unfi[node]
304 ctx = unfi[node]
305 # For tags like "D123", put them into "toconfirm" to verify later
305 # For tags like "D123", put them into "toconfirm" to verify later
306 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
306 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
307 for n in precnodes:
307 for n in precnodes:
308 if n in nodemap:
308 if n in nodemap:
309 for tag in unfi.nodetags(n):
309 for tag in unfi.nodetags(n):
310 m = _differentialrevisiontagre.match(tag)
310 m = _differentialrevisiontagre.match(tag)
311 if m:
311 if m:
312 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
312 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
313 continue
313 continue
314
314
315 # Check commit message
315 # Check commit message
316 m = _differentialrevisiondescre.search(ctx.description())
316 m = _differentialrevisiondescre.search(ctx.description())
317 if m:
317 if m:
318 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
318 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
319
319
320 # Double check if tags are genuine by collecting all old nodes from
320 # Double check if tags are genuine by collecting all old nodes from
321 # Phabricator, and expecting the precursors to overlap with them.
321 # Phabricator, and expecting the precursors to overlap with them.
322 if toconfirm:
322 if toconfirm:
323 drevs = [drev for force, precs, drev in toconfirm.values()]
323 drevs = [drev for force, precs, drev in toconfirm.values()]
324 alldiffs = callconduit(unfi, b'differential.querydiffs',
324 alldiffs = callconduit(unfi, b'differential.querydiffs',
325 {b'revisionIDs': drevs})
325 {b'revisionIDs': drevs})
326 getnode = lambda d: bin(
326 getnode = lambda d: bin(
327 getdiffmeta(d).get(b'node', b'')) or None
327 getdiffmeta(d).get(b'node', b'')) or None
328 for newnode, (force, precset, drev) in toconfirm.items():
328 for newnode, (force, precset, drev) in toconfirm.items():
329 diffs = [d for d in alldiffs.values()
329 diffs = [d for d in alldiffs.values()
330 if int(d[b'revisionID']) == drev]
330 if int(d[b'revisionID']) == drev]
331
331
332 # "precursors" as known by Phabricator
332 # "precursors" as known by Phabricator
333 phprecset = set(getnode(d) for d in diffs)
333 phprecset = set(getnode(d) for d in diffs)
334
334
335 # Ignore if precursors (Phabricator and local repo) do not overlap,
335 # Ignore if precursors (Phabricator and local repo) do not overlap,
336 # and force is not set (when commit message says nothing)
336 # and force is not set (when commit message says nothing)
337 if not force and not bool(phprecset & precset):
337 if not force and not bool(phprecset & precset):
338 tagname = b'D%d' % drev
338 tagname = b'D%d' % drev
339 tags.tag(repo, tagname, nullid, message=None, user=None,
339 tags.tag(repo, tagname, nullid, message=None, user=None,
340 date=None, local=True)
340 date=None, local=True)
341 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
341 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
342 b'Differential history\n') % drev)
342 b'Differential history\n') % drev)
343 continue
343 continue
344
344
345 # Find the last node using Phabricator metadata, and make sure it
345 # Find the last node using Phabricator metadata, and make sure it
346 # exists in the repo
346 # exists in the repo
347 oldnode = lastdiff = None
347 oldnode = lastdiff = None
348 if diffs:
348 if diffs:
349 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
349 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
350 oldnode = getnode(lastdiff)
350 oldnode = getnode(lastdiff)
351 if oldnode and oldnode not in nodemap:
351 if oldnode and oldnode not in nodemap:
352 oldnode = None
352 oldnode = None
353
353
354 result[newnode] = (oldnode, lastdiff, drev)
354 result[newnode] = (oldnode, lastdiff, drev)
355
355
356 return result
356 return result
357
357
358 def getdiff(ctx, diffopts):
358 def getdiff(ctx, diffopts):
359 """plain-text diff without header (user, commit message, etc)"""
359 """plain-text diff without header (user, commit message, etc)"""
360 output = util.stringio()
360 output = util.stringio()
361 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
361 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
362 None, opts=diffopts):
362 None, opts=diffopts):
363 output.write(chunk)
363 output.write(chunk)
364 return output.getvalue()
364 return output.getvalue()
365
365
366 def creatediff(ctx):
366 def creatediff(ctx):
367 """create a Differential Diff"""
367 """create a Differential Diff"""
368 repo = ctx.repo()
368 repo = ctx.repo()
369 repophid = getrepophid(repo)
369 repophid = getrepophid(repo)
370 # Create a "Differential Diff" via "differential.createrawdiff" API
370 # Create a "Differential Diff" via "differential.createrawdiff" API
371 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
371 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
372 if repophid:
372 if repophid:
373 params[b'repositoryPHID'] = repophid
373 params[b'repositoryPHID'] = repophid
374 diff = callconduit(repo, b'differential.createrawdiff', params)
374 diff = callconduit(repo, b'differential.createrawdiff', params)
375 if not diff:
375 if not diff:
376 raise error.Abort(_(b'cannot create diff for %s') % ctx)
376 raise error.Abort(_(b'cannot create diff for %s') % ctx)
377 return diff
377 return diff
378
378
379 def writediffproperties(ctx, diff):
379 def writediffproperties(ctx, diff):
380 """write metadata to diff so patches could be applied losslessly"""
380 """write metadata to diff so patches could be applied losslessly"""
381 params = {
381 params = {
382 b'diff_id': diff[b'id'],
382 b'diff_id': diff[b'id'],
383 b'name': b'hg:meta',
383 b'name': b'hg:meta',
384 b'data': templatefilters.json({
384 b'data': templatefilters.json({
385 b'user': ctx.user(),
385 b'user': ctx.user(),
386 b'date': b'%d %d' % ctx.date(),
386 b'date': b'%d %d' % ctx.date(),
387 b'branch': ctx.branch(),
387 b'branch': ctx.branch(),
388 b'node': ctx.hex(),
388 b'node': ctx.hex(),
389 b'parent': ctx.p1().hex(),
389 b'parent': ctx.p1().hex(),
390 }),
390 }),
391 }
391 }
392 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
392 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
393
393
394 params = {
394 params = {
395 b'diff_id': diff[b'id'],
395 b'diff_id': diff[b'id'],
396 b'name': b'local:commits',
396 b'name': b'local:commits',
397 b'data': templatefilters.json({
397 b'data': templatefilters.json({
398 ctx.hex(): {
398 ctx.hex(): {
399 b'author': stringutil.person(ctx.user()),
399 b'author': stringutil.person(ctx.user()),
400 b'authorEmail': stringutil.email(ctx.user()),
400 b'authorEmail': stringutil.email(ctx.user()),
401 b'time': int(ctx.date()[0]),
401 b'time': int(ctx.date()[0]),
402 b'commit': ctx.hex(),
402 b'commit': ctx.hex(),
403 b'parents': [ctx.p1().hex()],
403 b'parents': [ctx.p1().hex()],
404 b'branch': ctx.branch(),
404 b'branch': ctx.branch(),
405 },
405 },
406 }),
406 }),
407 }
407 }
408 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
408 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
409
409
410 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
410 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
411 olddiff=None, actions=None):
411 olddiff=None, actions=None):
412 """create or update a Differential Revision
412 """create or update a Differential Revision
413
413
414 If revid is None, create a new Differential Revision, otherwise update
414 If revid is None, create a new Differential Revision, otherwise update
415 revid. If parentrevid is not None, set it as a dependency.
415 revid. If parentrevid is not None, set it as a dependency.
416
416
417 If oldnode is not None, check if the patch content (without commit message
417 If oldnode is not None, check if the patch content (without commit message
418 and metadata) has changed before creating another diff.
418 and metadata) has changed before creating another diff.
419
419
420 If actions is not None, they will be appended to the transaction.
420 If actions is not None, they will be appended to the transaction.
421 """
421 """
422 repo = ctx.repo()
422 repo = ctx.repo()
423 if oldnode:
423 if oldnode:
424 diffopts = mdiff.diffopts(git=True, context=32767)
424 diffopts = mdiff.diffopts(git=True, context=32767)
425 oldctx = repo.unfiltered()[oldnode]
425 oldctx = repo.unfiltered()[oldnode]
426 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
426 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
427 else:
427 else:
428 neednewdiff = True
428 neednewdiff = True
429
429
430 transactions = []
430 transactions = []
431 if neednewdiff:
431 if neednewdiff:
432 diff = creatediff(ctx)
432 diff = creatediff(ctx)
433 transactions.append({b'type': b'update', b'value': diff[b'phid']})
433 transactions.append({b'type': b'update', b'value': diff[b'phid']})
434 else:
434 else:
435 # Even if we don't need to upload a new diff because the patch content
435 # Even if we don't need to upload a new diff because the patch content
436 # does not change, we might still need to update its metadata so
436 # does not change, we might still need to update its metadata so
437 # pushers know the correct node metadata.
437 # pushers know the correct node metadata.
438 assert olddiff
438 assert olddiff
439 diff = olddiff
439 diff = olddiff
440 writediffproperties(ctx, diff)
440 writediffproperties(ctx, diff)
441
441
442 # Use a temporary summary to set dependency. There might be better ways but
442 # Use a temporary summary to set dependency. There might be better ways but
443 # I cannot find them for now. However, do not do that if we are updating an
443 # I cannot find them for now. However, do not do that if we are updating an
444 # existing revision (revid is not None), since that introduces visible
444 # existing revision (revid is not None), since that introduces visible
445 # churn (someone edited "Summary" twice) on the web page.
445 # churn (someone edited "Summary" twice) on the web page.
446 if parentrevid and revid is None:
446 if parentrevid and revid is None:
447 summary = b'Depends on D%d' % parentrevid
447 summary = b'Depends on D%d' % parentrevid
448 transactions += [{b'type': b'summary', b'value': summary},
448 transactions += [{b'type': b'summary', b'value': summary},
449 {b'type': b'summary', b'value': b' '}]
449 {b'type': b'summary', b'value': b' '}]
450
450
451 if actions:
451 if actions:
452 transactions += actions
452 transactions += actions
453
453
454 # Parse commit message and update related fields.
454 # Parse commit message and update related fields.
455 desc = ctx.description()
455 desc = ctx.description()
456 info = callconduit(repo, b'differential.parsecommitmessage',
456 info = callconduit(repo, b'differential.parsecommitmessage',
457 {b'corpus': desc})
457 {b'corpus': desc})
458 for k, v in info[b'fields'].items():
458 for k, v in info[b'fields'].items():
459 if k in [b'title', b'summary', b'testPlan']:
459 if k in [b'title', b'summary', b'testPlan']:
460 transactions.append({b'type': k, b'value': v})
460 transactions.append({b'type': k, b'value': v})
461
461
462 params = {b'transactions': transactions}
462 params = {b'transactions': transactions}
463 if revid is not None:
463 if revid is not None:
464 # Update an existing Differential Revision
464 # Update an existing Differential Revision
465 params[b'objectIdentifier'] = revid
465 params[b'objectIdentifier'] = revid
466
466
467 revision = callconduit(repo, b'differential.revision.edit', params)
467 revision = callconduit(repo, b'differential.revision.edit', params)
468 if not revision:
468 if not revision:
469 raise error.Abort(_(b'cannot create revision for %s') % ctx)
469 raise error.Abort(_(b'cannot create revision for %s') % ctx)
470
470
471 return revision, diff
471 return revision, diff
472
472
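For reference, the ``transactions`` payload built above, as it reaches ``differential.revision.edit``, looks roughly like the following (all identifiers and text are made up; the duplicated ``summary`` entries are the temporary-dependency trick described in the comment above)::

    params = {
        b'transactions': [
            # from creatediff(): point the revision at the new diff
            {b'type': b'update', b'value': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx'},
            # temporary summary used to record the stack dependency
            {b'type': b'summary', b'value': b'Depends on D122'},
            {b'type': b'summary', b'value': b' '},
            # fields parsed out of the commit message
            {b'type': b'title', b'value': b'phabricator: include branch in the phabread output'},
            {b'type': b'summary', b'value': b'...'},
            {b'type': b'testPlan', b'value': b'...'},
        ],
        # only present when updating an existing revision:
        # b'objectIdentifier': 123,
    }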
473 def userphids(repo, names):
473 def userphids(repo, names):
474 """convert user names to PHIDs"""
474 """convert user names to PHIDs"""
475 names = [name.lower() for name in names]
475 names = [name.lower() for name in names]
476 query = {b'constraints': {b'usernames': names}}
476 query = {b'constraints': {b'usernames': names}}
477 result = callconduit(repo, b'user.search', query)
477 result = callconduit(repo, b'user.search', query)
478 # An unknown username is not an error of the API. So check if we have missed
478 # An unknown username is not an error of the API. So check if we have missed
479 # some names here.
479 # some names here.
480 data = result[b'data']
480 data = result[b'data']
481 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
481 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
482 unresolved = set(names) - resolved
482 unresolved = set(names) - resolved
483 if unresolved:
483 if unresolved:
484 raise error.Abort(_(b'unknown username: %s')
484 raise error.Abort(_(b'unknown username: %s')
485 % b' '.join(sorted(unresolved)))
485 % b' '.join(sorted(unresolved)))
486 return [entry[b'phid'] for entry in data]
486 return [entry[b'phid'] for entry in data]
487
487
488 @vcrcommand(b'phabsend',
488 @vcrcommand(b'phabsend',
489 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
489 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
490 (b'', b'amend', True, _(b'update commit messages')),
490 (b'', b'amend', True, _(b'update commit messages')),
491 (b'', b'reviewer', [], _(b'specify reviewers')),
491 (b'', b'reviewer', [], _(b'specify reviewers')),
492 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
492 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
493 _(b'REV [OPTIONS]'),
493 _(b'REV [OPTIONS]'),
494 helpcategory=command.CATEGORY_IMPORT_EXPORT)
494 helpcategory=command.CATEGORY_IMPORT_EXPORT)
495 def phabsend(ui, repo, *revs, **opts):
495 def phabsend(ui, repo, *revs, **opts):
496 """upload changesets to Phabricator
496 """upload changesets to Phabricator
497
497
498 If there are multiple revisions specified, they will be sent as a stack
498 If there are multiple revisions specified, they will be sent as a stack
499 with a linear dependency relationship using the order specified by the
499 with a linear dependency relationship using the order specified by the
500 revset.
500 revset.
501
501
502 When uploading changesets for the first time, local tags will be created to
502 When uploading changesets for the first time, local tags will be created to
503 maintain the association. After the first time, phabsend will check the
503 maintain the association. After the first time, phabsend will check the
504 obsstore and tags information so it can figure out whether to update an
504 obsstore and tags information so it can figure out whether to update an
505 existing Differential Revision, or create a new one.
505 existing Differential Revision, or create a new one.
506
506
507 If --amend is set, update commit messages so they have the
507 If --amend is set, update commit messages so they have the
508 ``Differential Revision`` URL and remove related tags. This is similar to what
508 ``Differential Revision`` URL and remove related tags. This is similar to what
509 arcanist does, and is preferable in author-push workflows. Otherwise,
509 arcanist does, and is preferable in author-push workflows. Otherwise,
510 use local tags to record the ``Differential Revision`` association.
510 use local tags to record the ``Differential Revision`` association.
511
511
512 The --confirm option lets you confirm changesets before sending them. You
512 The --confirm option lets you confirm changesets before sending them. You
513 can also add the following to your configuration file to make it the default
513 can also add the following to your configuration file to make it the default
514 behaviour::
514 behaviour::
515
515
516 [phabsend]
516 [phabsend]
517 confirm = true
517 confirm = true
518
518
519 phabsend will check obsstore and the above association to decide whether to
519 phabsend will check obsstore and the above association to decide whether to
520 update an existing Differential Revision, or create a new one.
520 update an existing Differential Revision, or create a new one.
521 """
521 """
522 opts = pycompat.byteskwargs(opts)
522 opts = pycompat.byteskwargs(opts)
523 revs = list(revs) + opts.get(b'rev', [])
523 revs = list(revs) + opts.get(b'rev', [])
524 revs = scmutil.revrange(repo, revs)
524 revs = scmutil.revrange(repo, revs)
525
525
526 if not revs:
526 if not revs:
527 raise error.Abort(_(b'phabsend requires at least one changeset'))
527 raise error.Abort(_(b'phabsend requires at least one changeset'))
528 if opts.get(b'amend'):
528 if opts.get(b'amend'):
529 cmdutil.checkunfinished(repo)
529 cmdutil.checkunfinished(repo)
530
530
531 # {newnode: (oldnode, olddiff, olddrev)}
531 # {newnode: (oldnode, olddiff, olddrev)}
532 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
532 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
533
533
534 confirm = ui.configbool(b'phabsend', b'confirm')
534 confirm = ui.configbool(b'phabsend', b'confirm')
535 confirm |= bool(opts.get(b'confirm'))
535 confirm |= bool(opts.get(b'confirm'))
536 if confirm:
536 if confirm:
537 confirmed = _confirmbeforesend(repo, revs, oldmap)
537 confirmed = _confirmbeforesend(repo, revs, oldmap)
538 if not confirmed:
538 if not confirmed:
539 raise error.Abort(_(b'phabsend cancelled'))
539 raise error.Abort(_(b'phabsend cancelled'))
540
540
541 actions = []
541 actions = []
542 reviewers = opts.get(b'reviewer', [])
542 reviewers = opts.get(b'reviewer', [])
543 if reviewers:
543 if reviewers:
544 phids = userphids(repo, reviewers)
544 phids = userphids(repo, reviewers)
545 actions.append({b'type': b'reviewers.add', b'value': phids})
545 actions.append({b'type': b'reviewers.add', b'value': phids})
546
546
547 drevids = [] # [int]
547 drevids = [] # [int]
548 diffmap = {} # {newnode: diff}
548 diffmap = {} # {newnode: diff}
549
549
550 # Send patches one by one so we know their Differential Revision IDs and
550 # Send patches one by one so we know their Differential Revision IDs and
551 # can provide the dependency relationship
551 # can provide the dependency relationship
552 lastrevid = None
552 lastrevid = None
553 for rev in revs:
553 for rev in revs:
554 ui.debug(b'sending rev %d\n' % rev)
554 ui.debug(b'sending rev %d\n' % rev)
555 ctx = repo[rev]
555 ctx = repo[rev]
556
556
557 # Get Differential Revision ID
557 # Get Differential Revision ID
558 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
558 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
559 if oldnode != ctx.node() or opts.get(b'amend'):
559 if oldnode != ctx.node() or opts.get(b'amend'):
560 # Create or update Differential Revision
560 # Create or update Differential Revision
561 revision, diff = createdifferentialrevision(
561 revision, diff = createdifferentialrevision(
562 ctx, revid, lastrevid, oldnode, olddiff, actions)
562 ctx, revid, lastrevid, oldnode, olddiff, actions)
563 diffmap[ctx.node()] = diff
563 diffmap[ctx.node()] = diff
564 newrevid = int(revision[b'object'][b'id'])
564 newrevid = int(revision[b'object'][b'id'])
565 if revid:
565 if revid:
566 action = b'updated'
566 action = b'updated'
567 else:
567 else:
568 action = b'created'
568 action = b'created'
569
569
570 # Create a local tag to note the association, if commit message
570 # Create a local tag to note the association, if commit message
571 # does not have it already
571 # does not have it already
572 m = _differentialrevisiondescre.search(ctx.description())
572 m = _differentialrevisiondescre.search(ctx.description())
573 if not m or int(m.group(r'id')) != newrevid:
573 if not m or int(m.group(r'id')) != newrevid:
574 tagname = b'D%d' % newrevid
574 tagname = b'D%d' % newrevid
575 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
575 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
576 date=None, local=True)
576 date=None, local=True)
577 else:
577 else:
578 # Nothing changed. But still set "newrevid" so the next revision
578 # Nothing changed. But still set "newrevid" so the next revision
579 # could depend on this one.
579 # could depend on this one.
580 newrevid = revid
580 newrevid = revid
581 action = b'skipped'
581 action = b'skipped'
582
582
583 actiondesc = ui.label(
583 actiondesc = ui.label(
584 {b'created': _(b'created'),
584 {b'created': _(b'created'),
585 b'skipped': _(b'skipped'),
585 b'skipped': _(b'skipped'),
586 b'updated': _(b'updated')}[action],
586 b'updated': _(b'updated')}[action],
587 b'phabricator.action.%s' % action)
587 b'phabricator.action.%s' % action)
588 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
588 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
589 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
589 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
590 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
590 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
591 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
591 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
592 desc))
592 desc))
593 drevids.append(newrevid)
593 drevids.append(newrevid)
594 lastrevid = newrevid
594 lastrevid = newrevid
595
595
596 # Update commit messages and remove tags
596 # Update commit messages and remove tags
597 if opts.get(b'amend'):
597 if opts.get(b'amend'):
598 unfi = repo.unfiltered()
598 unfi = repo.unfiltered()
599 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
599 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
600 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
600 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
601 wnode = unfi[b'.'].node()
601 wnode = unfi[b'.'].node()
602 mapping = {} # {oldnode: [newnode]}
602 mapping = {} # {oldnode: [newnode]}
603 for i, rev in enumerate(revs):
603 for i, rev in enumerate(revs):
604 old = unfi[rev]
604 old = unfi[rev]
605 drevid = drevids[i]
605 drevid = drevids[i]
606 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
606 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
607 newdesc = getdescfromdrev(drev)
607 newdesc = getdescfromdrev(drev)
608 # Make sure the commit message contains "Differential Revision"
608 # Make sure the commit message contains "Differential Revision"
609 if old.description() != newdesc:
609 if old.description() != newdesc:
610 if old.phase() == phases.public:
610 if old.phase() == phases.public:
611 ui.warn(_("warning: not updating public commit %s\n")
611 ui.warn(_("warning: not updating public commit %s\n")
612 % scmutil.formatchangeid(old))
612 % scmutil.formatchangeid(old))
613 continue
613 continue
614 parents = [
614 parents = [
615 mapping.get(old.p1().node(), (old.p1(),))[0],
615 mapping.get(old.p1().node(), (old.p1(),))[0],
616 mapping.get(old.p2().node(), (old.p2(),))[0],
616 mapping.get(old.p2().node(), (old.p2(),))[0],
617 ]
617 ]
618 new = context.metadataonlyctx(
618 new = context.metadataonlyctx(
619 repo, old, parents=parents, text=newdesc,
619 repo, old, parents=parents, text=newdesc,
620 user=old.user(), date=old.date(), extra=old.extra())
620 user=old.user(), date=old.date(), extra=old.extra())
621
621
622 newnode = new.commit()
622 newnode = new.commit()
623
623
624 mapping[old.node()] = [newnode]
624 mapping[old.node()] = [newnode]
625 # Update diff property
625 # Update diff property
626 writediffproperties(unfi[newnode], diffmap[old.node()])
626 writediffproperties(unfi[newnode], diffmap[old.node()])
627 # Remove local tags since they are no longer necessary
627 # Remove local tags since they are no longer necessary
628 tagname = b'D%d' % drevid
628 tagname = b'D%d' % drevid
629 if tagname in repo.tags():
629 if tagname in repo.tags():
630 tags.tag(repo, tagname, nullid, message=None, user=None,
630 tags.tag(repo, tagname, nullid, message=None, user=None,
631 date=None, local=True)
631 date=None, local=True)
632 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
632 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
633 if wnode in mapping:
633 if wnode in mapping:
634 unfi.setparents(mapping[wnode][0])
634 unfi.setparents(mapping[wnode][0])
635
635
636 # Map from "hg:meta" keys to header understood by "hg import". The order is
636 # Map from "hg:meta" keys to header understood by "hg import". The order is
637 # consistent with "hg export" output.
637 # consistent with "hg export" output.
638 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
638 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
639 (b'node', b'Node ID'), (b'parent', b'Parent ')])
639 (b'branch', b'Branch'), (b'node', b'Node ID'),
640 (b'parent', b'Parent ')])
640
641
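This mapping drives the ``hg import``-style header that phabread reconstructs from ``hg:meta``; the new ``branch`` entry is what makes a ``# Branch`` line (this commit's purpose) appear. A hedged sketch of how the mapping might expand, using sample metadata modelled on the ``getdiffmeta`` docstring further down::

    # Same key -> header-name mapping as _metanamemap above (order matters,
    # to stay consistent with "hg export" output).
    metanamemap = [('user', 'User'), ('date', 'Date'), ('branch', 'Branch'),
                   ('node', 'Node ID'), ('parent', 'Parent ')]

    meta = {  # sample "hg:meta" payload
        'user': 'Foo Bar <foo@example.com>',
        'date': '1499571514 25200',
        'branch': 'default',
        'node': '98c08acae292b2faf60a279b4189beb6cff1414d',
        'parent': '6d0abad76b30e4724a37ab8721d630394070fe16',
    }

    header = '# HG changeset patch\n' + ''.join(
        '# %s %s\n' % (name, meta[k]) for k, name in metanamemap if k in meta)
    print(header)
    # # HG changeset patch
    # # User Foo Bar <foo@example.com>
    # # Date 1499571514 25200
    # # Branch default
    # # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # # Parent  6d0abad76b30e4724a37ab8721d630394070fe16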
641 def _confirmbeforesend(repo, revs, oldmap):
642 def _confirmbeforesend(repo, revs, oldmap):
642 url, token = readurltoken(repo)
643 url, token = readurltoken(repo)
643 ui = repo.ui
644 ui = repo.ui
644 for rev in revs:
645 for rev in revs:
645 ctx = repo[rev]
646 ctx = repo[rev]
646 desc = ctx.description().splitlines()[0]
647 desc = ctx.description().splitlines()[0]
647 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
648 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
648 if drevid:
649 if drevid:
649 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
650 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
650 else:
651 else:
651 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
652 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
652
653
653 ui.write(_(b'%s - %s: %s\n')
654 ui.write(_(b'%s - %s: %s\n')
654 % (drevdesc,
655 % (drevdesc,
655 ui.label(bytes(ctx), b'phabricator.node'),
656 ui.label(bytes(ctx), b'phabricator.node'),
656 ui.label(desc, b'phabricator.desc')))
657 ui.label(desc, b'phabricator.desc')))
657
658
658 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
659 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
659 b'$$ &Yes $$ &No') % url):
660 b'$$ &Yes $$ &No') % url):
660 return False
661 return False
661
662
662 return True
663 return True
663
664
664 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
665 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
665 b'abandoned'}
666 b'abandoned'}
666
667
667 def _getstatusname(drev):
668 def _getstatusname(drev):
668 """get normalized status name from a Differential Revision"""
669 """get normalized status name from a Differential Revision"""
669 return drev[b'statusName'].replace(b' ', b'').lower()
670 return drev[b'statusName'].replace(b' ', b'').lower()
670
671
671 # Small language to specify differential revisions. Supported symbols: (), :X,
672 # Small language to specify differential revisions. Supported symbols: (), :X,
672 # +, -, and &.
673 # +, -, and &.
673
674
674 _elements = {
675 _elements = {
675 # token-type: binding-strength, primary, prefix, infix, suffix
676 # token-type: binding-strength, primary, prefix, infix, suffix
676 b'(': (12, None, (b'group', 1, b')'), None, None),
677 b'(': (12, None, (b'group', 1, b')'), None, None),
677 b':': (8, None, (b'ancestors', 8), None, None),
678 b':': (8, None, (b'ancestors', 8), None, None),
678 b'&': (5, None, None, (b'and_', 5), None),
679 b'&': (5, None, None, (b'and_', 5), None),
679 b'+': (4, None, None, (b'add', 4), None),
680 b'+': (4, None, None, (b'add', 4), None),
680 b'-': (4, None, None, (b'sub', 4), None),
681 b'-': (4, None, None, (b'sub', 4), None),
681 b')': (0, None, None, None, None),
682 b')': (0, None, None, None, None),
682 b'symbol': (0, b'symbol', None, None, None),
683 b'symbol': (0, b'symbol', None, None, None),
683 b'end': (0, None, None, None, None),
684 b'end': (0, None, None, None, None),
684 }
685 }
685
686
686 def _tokenize(text):
687 def _tokenize(text):
687 view = memoryview(text) # zero-copy slice
688 view = memoryview(text) # zero-copy slice
688 special = b'():+-& '
689 special = b'():+-& '
689 pos = 0
690 pos = 0
690 length = len(text)
691 length = len(text)
691 while pos < length:
692 while pos < length:
692 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
693 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
693 pycompat.iterbytestr(view[pos:])))
694 pycompat.iterbytestr(view[pos:])))
694 if symbol:
695 if symbol:
695 yield (b'symbol', symbol, pos)
696 yield (b'symbol', symbol, pos)
696 pos += len(symbol)
697 pos += len(symbol)
697 else: # special char, ignore space
698 else: # special char, ignore space
698 if text[pos] != b' ':
699 if text[pos] != b' ':
699 yield (text[pos], None, pos)
700 yield (text[pos], None, pos)
700 pos += 1
701 pos += 1
701 yield (b'end', None, pos)
702 yield (b'end', None, pos)
702
703
703 def _parse(text):
704 def _parse(text):
704 tree, pos = parser.parser(_elements).parse(_tokenize(text))
705 tree, pos = parser.parser(_elements).parse(_tokenize(text))
705 if pos != len(text):
706 if pos != len(text):
706 raise error.ParseError(b'invalid token', pos)
707 raise error.ParseError(b'invalid token', pos)
707 return tree
708 return tree
708
709
709 def _parsedrev(symbol):
710 def _parsedrev(symbol):
710 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
711 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
711 if symbol.startswith(b'D') and symbol[1:].isdigit():
712 if symbol.startswith(b'D') and symbol[1:].isdigit():
712 return int(symbol[1:])
713 return int(symbol[1:])
713 if symbol.isdigit():
714 if symbol.isdigit():
714 return int(symbol)
715 return int(symbol)
715
716
716 def _prefetchdrevs(tree):
717 def _prefetchdrevs(tree):
717 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
718 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
718 drevs = set()
719 drevs = set()
719 ancestordrevs = set()
720 ancestordrevs = set()
720 op = tree[0]
721 op = tree[0]
721 if op == b'symbol':
722 if op == b'symbol':
722 r = _parsedrev(tree[1])
723 r = _parsedrev(tree[1])
723 if r:
724 if r:
724 drevs.add(r)
725 drevs.add(r)
725 elif op == b'ancestors':
726 elif op == b'ancestors':
726 r, a = _prefetchdrevs(tree[1])
727 r, a = _prefetchdrevs(tree[1])
727 drevs.update(r)
728 drevs.update(r)
728 ancestordrevs.update(r)
729 ancestordrevs.update(r)
729 ancestordrevs.update(a)
730 ancestordrevs.update(a)
730 else:
731 else:
731 for t in tree[1:]:
732 for t in tree[1:]:
732 r, a = _prefetchdrevs(t)
733 r, a = _prefetchdrevs(t)
733 drevs.update(r)
734 drevs.update(r)
734 ancestordrevs.update(a)
735 ancestordrevs.update(a)
735 return drevs, ancestordrevs
736 return drevs, ancestordrevs
736
737
737 def querydrev(repo, spec):
738 def querydrev(repo, spec):
738 """return a list of "Differential Revision" dicts
739 """return a list of "Differential Revision" dicts
739
740
740 spec is a string using a simple query language, see docstring in phabread
741 spec is a string using a simple query language, see docstring in phabread
741 for details.
742 for details.
742
743
743 A "Differential Revision dict" looks like:
744 A "Differential Revision dict" looks like:
744
745
745 {
746 {
746 "id": "2",
747 "id": "2",
747 "phid": "PHID-DREV-672qvysjcczopag46qty",
748 "phid": "PHID-DREV-672qvysjcczopag46qty",
748 "title": "example",
749 "title": "example",
749 "uri": "https://phab.example.com/D2",
750 "uri": "https://phab.example.com/D2",
750 "dateCreated": "1499181406",
751 "dateCreated": "1499181406",
751 "dateModified": "1499182103",
752 "dateModified": "1499182103",
752 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
753 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
753 "status": "0",
754 "status": "0",
754 "statusName": "Needs Review",
755 "statusName": "Needs Review",
755 "properties": [],
756 "properties": [],
756 "branch": null,
757 "branch": null,
757 "summary": "",
758 "summary": "",
758 "testPlan": "",
759 "testPlan": "",
759 "lineCount": "2",
760 "lineCount": "2",
760 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
761 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
761 "diffs": [
762 "diffs": [
762 "3",
763 "3",
763 "4",
764 "4",
764 ],
765 ],
765 "commits": [],
766 "commits": [],
766 "reviewers": [],
767 "reviewers": [],
767 "ccs": [],
768 "ccs": [],
768 "hashes": [],
769 "hashes": [],
769 "auxiliary": {
770 "auxiliary": {
770 "phabricator:projects": [],
771 "phabricator:projects": [],
771 "phabricator:depends-on": [
772 "phabricator:depends-on": [
772 "PHID-DREV-gbapp366kutjebt7agcd"
773 "PHID-DREV-gbapp366kutjebt7agcd"
773 ]
774 ]
774 },
775 },
775 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
776 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
776 "sourcePath": null
777 "sourcePath": null
777 }
778 }
778 """
779 """
779 def fetch(params):
780 def fetch(params):
780 """params -> single drev or None"""
781 """params -> single drev or None"""
781 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
782 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
782 if key in prefetched:
783 if key in prefetched:
783 return prefetched[key]
784 return prefetched[key]
784 drevs = callconduit(repo, b'differential.query', params)
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]

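# Illustrative sketch (not part of the original module): DREVSPEC strings
# accepted by querydrev() above.  "repo" is assumed to be a local repository
# object with the [phabricator] settings from the module docstring; the
# revision numbers are made up.
def _example_querydrev(repo):
    single = querydrev(repo, b'D23')         # one revision, 'D' prefix optional
    stack = querydrev(repo, b':D23')         # D23 plus everything it depends on
    mixed = querydrev(repo, b':D23-D20+26')  # the stack, minus D20, plus D26
    return single, stack, mixed
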
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

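# Illustrative sketch (not part of the original module): the commit message
# that getdescfromdrev() would assemble for a hypothetical drev dict.  The
# field names match the differential.query output consumed above; the values
# are made up.
def _example_getdescfromdrev():
    drev = {
        b'title': b'phabricator: include branch in the phabread output',
        b'summary': b'Pass the branch name through so importing restores it.',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D123',
    }
    # With an empty test plan, filter(None, ...) drops that section, leaving:
    #   <title>\n\n<summary>\n\nDifferential Revision: <uri>
    return getdescfromdrev(drev)
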
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % commit[b'time']
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta

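# Illustrative sketch (not part of the original module): what getdiffmeta()
# would return for a hypothetical diff object carrying only "local:commits"
# metadata.  All values below are invented for illustration.
_example_diff = {
    b'properties': {
        b'local:commits': {
            b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                b'author': b'Foo Bar',
                b'authorEmail': b'foo@example.com',
                b'time': 1499546314,
                b'branch': b'default',
                b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
            },
        },
    },
}
# getdiffmeta(_example_diff) would yield (note the zeroed time zone offset):
#   {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499546314 0',
#    b'branch': b'default', b'node': b'98c08ac...', b'parent': b'6d0abad...'}
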
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

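# Illustrative sketch (not part of the original module): collecting readpatch()
# output into a single byte string, e.g. to pipe into 'hg import -'.  "repo" is
# assumed to be a configured local repository object; the spec is made up.
def _example_readpatch(repo, spec=b':D123'):
    chunks = []
    readpatch(repo, querydrev(repo, spec), chunks.append)
    return b''.join(chunks)
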
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

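# Illustrative sketch (not part of the original module): reading a whole stack,
# shown as a command line (in comments) and as the spec rewriting phabread()
# performs for --stack.  The revision number is made up.
#
#   $ hg phabread --stack D123 > stack.patch   # same as: hg phabread ':D123'
#   $ hg import stack.patch
def _example_stackspec(spec=b'D123'):
    # --stack simply wraps the given DREVSPEC in the ':' (ancestors) operator
    return b':(%s)' % spec
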
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
            ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

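# Illustrative sketch (not part of the original module): the conduit
# transactions that 'hg phabupdate --accept -m "LGTM" :D123' would send for the
# topmost revision of the stack (the comment is only attached to the last one).
_example_phabupdate_actions = [
    {b'type': b'accept', b'value': b'true'},
    {b'type': b'comment', b'value': b'LGTM'},
]
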
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
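
# Illustrative sketch (not part of the original module): a log template using
# the phabreview keyword defined above, e.g. passed to 'hg log -r . -T ...'.
# The exact template text is an assumption, not taken from the extension docs.
_example_phabreview_template = (
    b'{if(phabreview, "{phabreview.id}: {phabreview.url}\\n", "no review\\n")}')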