phabricator: make `hg debugcallconduit` work outside a hg repo...
Pulkit Goyal
r43013:16312ea4 default
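
With ``optionalrepo`` set on the command (see the change below), `hg debugcallconduit` should no longer require being run inside a repository: as long as ``phabricator.url`` and the matching ``[auth]`` token are available from the user-level configuration, an invocation along these lines (hypothetical example; Conduit parameters are read from stdin as a JSON blob) works from any directory:

    $ echo '{}' | hg debugcallconduit conduit.ping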
@@ -1,1066 +1,1067 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires ``Test Plan`` which might prevent some
changeset from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that is not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import contextlib
import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    httpconnection as httpconnectionmod,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    registrar,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]

-def vcrcommand(name, flags, spec, helpcategory=None):
+def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    fullflags = flags + _VCR_FLAGS
    def hgmatcher(r1, r2):
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, r'httpconnection',
                             stubs.VCRHTTPConnection),
                            (urlmod, r'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
-        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
+        return command(name, fullflags, spec, helpcategory=helpcategory,
+                       optionalrepo=optionalrepo)(inner)
    return decorate

def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
    process(b'', params)
    return util.urlreq.urlencode(flatparams)
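# For example, following the docstring above:
#   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
# flattens the parameters to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} before
# urlencode percent-encodes them ('a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f').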

def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token

def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
        else x,
        json.loads(body)
    )
    if parsed.get(b'error_code'):
        msg = (_(b'Conduit Error (%s): %s')
               % (parsed[b'error_code'], parsed[b'error_info']))
        raise error.Abort(msg)
    return parsed[b'result']

-@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
+@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(lambda x:
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
        json.loads(rawparams)
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(lambda x:
        encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params)
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))

def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo.ui, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid

_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
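# For example, a commit message footer such as
#   Differential Revision: https://phab.example.com/D1234
# is matched with group 'url' holding the full URL and group 'id' holding
# the decimal revision number (1234 here).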

def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                      None, opts=diffopts):
        output.write(chunk)
    return output.getvalue()

def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff

def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json({
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        }),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json({
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'commit': ctx.hex(),
                b'parents': [ctx.p1().hex()],
                b'branch': ctx.branch(),
            },
        }),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None, comment=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%d' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo.ui, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff

def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(unresolved)))
    return [entry[b'phid'] for entry in data]

@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'm', b'comment', b'',
              _(b'add a comment to Revisions with new/updated Diffs')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions,
                opts.get(b'comment'))
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])

# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'branch', b'Branch'), (b'node', b'Node ID'),
                              (b'parent', b'Parent ')])

def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(_(b'%s - %s: %s\n')
                 % (drevdesc,
                    ui.label(bytes(ctx), b'phabricator.node'),
                    ui.label(desc, b'phabricator.desc')))

    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
                         b'$$ &Yes $$ &No') % url):
        return False

    return True

_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}

def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()

# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}

def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              pycompat.iterbytestr(view[pos:])))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)

def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree

def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)

def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs

def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

827 # Initialize prefetch cache
828 # Initialize prefetch cache
828 prefetched = {} # {id or phid: drev}
829 prefetched = {} # {id or phid: drev}
829
830
830 tree = _parse(spec)
831 tree = _parse(spec)
831 drevs, ancestordrevs = _prefetchdrevs(tree)
832 drevs, ancestordrevs = _prefetchdrevs(tree)
832
833
833 # developer config: phabricator.batchsize
834 # developer config: phabricator.batchsize
834 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
835 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
835
836
836 # Prefetch Differential Revisions in batch
837 # Prefetch Differential Revisions in batch
837 tofetch = set(drevs)
838 tofetch = set(drevs)
838 for r in ancestordrevs:
839 for r in ancestordrevs:
839 tofetch.update(range(max(1, r - batchsize), r + 1))
840 tofetch.update(range(max(1, r - batchsize), r + 1))
840 if drevs:
841 if drevs:
841 fetch({b'ids': list(tofetch)})
842 fetch({b'ids': list(tofetch)})
842 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
843 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
843
844
844 # Walk through the tree, return smartsets
845 # Walk through the tree, return smartsets
845 def walk(tree):
846 def walk(tree):
846 op = tree[0]
847 op = tree[0]
847 if op == b'symbol':
848 if op == b'symbol':
848 drev = _parsedrev(tree[1])
849 drev = _parsedrev(tree[1])
849 if drev:
850 if drev:
850 return smartset.baseset([drev])
851 return smartset.baseset([drev])
851 elif tree[1] in _knownstatusnames:
852 elif tree[1] in _knownstatusnames:
852 drevs = [r for r in validids
853 drevs = [r for r in validids
853 if _getstatusname(prefetched[r]) == tree[1]]
854 if _getstatusname(prefetched[r]) == tree[1]]
854 return smartset.baseset(drevs)
855 return smartset.baseset(drevs)
855 else:
856 else:
856 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
857 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
857 elif op in {b'and_', b'add', b'sub'}:
858 elif op in {b'and_', b'add', b'sub'}:
858 assert len(tree) == 3
859 assert len(tree) == 3
859 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
860 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
860 elif op == b'group':
861 elif op == b'group':
861 return walk(tree[1])
862 return walk(tree[1])
862 elif op == b'ancestors':
863 elif op == b'ancestors':
863 return getstack(walk(tree[1]))
864 return getstack(walk(tree[1]))
864 else:
865 else:
865 raise error.ProgrammingError(b'illegal tree: %r' % tree)
866 raise error.ProgrammingError(b'illegal tree: %r' % tree)
866
867
867 return [prefetched[r] for r in walk(tree)]
868 return [prefetched[r] for r in walk(tree)]
868
869
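# Illustrative sketch only; this helper is hypothetical and not part of the
# upstream module.  It shows how a caller might resolve a DREVSPEC with
# querydrev(): ':D6+8-(2+D4)' selects the stack up to D6, adds D8, and
# removes D2 and D4, returning the matching "drev" dictionaries.
def _examplequerydrev(repo):
    drevs = querydrev(repo, b':D6+8-(2+D4)')
    return [int(drev[b'id']) for drev in drevs]
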
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))

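# Illustrative sketch only; this helper and its data are hypothetical and not
# part of the upstream module.  With an empty test plan, getdescfromdrev()
# drops that section and joins the remaining pieces with blank lines.
def _exampledescription():
    drev = {
        b'title': b'phabricator: add examples',
        b'summary': b'Some details.',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D123',
    }
    # -> b'phabricator: add examples\n\nSome details.\n\n'
    #    b'Differential Revision: https://phab.example.com/D123'
    return getdescfromdrev(drev)
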
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % commit[b'time']
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta

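# Illustrative sketch only; this helper and its data are hypothetical and not
# part of the upstream module.  A diff carrying only "local:commits" (as sent
# by arc) is folded into the same shape as "hg:meta"; note the zeroed time
# zone offset in "date".
def _examplediffmeta():
    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    # -> {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499546314 0',
    #     b'branch': b'default', b'node': b'98c08ac...', b'parent': b'6d0abad...'}
    return getdiffmeta(diff)
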
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo.ui, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

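# Illustrative sketch only (not part of the upstream module): for a drev whose
# newest diff carries hg:meta, the patch written above starts roughly with:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
#
# followed by the commit message from getdescfromdrev() and the raw diff
# returned by differential.getrawdiff.
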
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used
    to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

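# Illustrative usage only (not part of the upstream module):
#
#   $ hg phabread D123 > D123.patch && hg import D123.patch
#   $ hg phabread --stack D123            # D123 and everything it depends on
#   $ hg phabread ':D9 & needsreview'     # "Needs Review" revisions up to D9
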
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(ui, b'differential.revision.edit', params)

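# Illustrative usage only (not part of the upstream module):
#
#   $ hg phabupdate --accept D123
#   $ hg phabupdate --abandon ':D9' -m 'superseded by a new series'
#
# The comment given with -m/--comment is attached to the last revision of the
# selected set, as handled in the loop above.
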
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
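
# Illustrative usage only (not part of the upstream module): once a changeset
# description carries a "Differential Revision:" line, or the changeset has a
# local "D123" tag, the keyword can be used from log templates, e.g.:
#
#   $ hg log -r . -T '{phabreview.id} {phabreview.url}\n'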