phabricator: pass ui into readurltoken instead of passing repo...
Pulkit Goyal
r42626:500b64c5 default
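This revision narrows readurltoken's interface: the function now takes a ui object directly instead of a whole repo that it only used to reach repo.ui. A minimal sketch of the before/after call pattern, as it appears in the hunks below (callconduit and _confirmbeforesend are the callers updated to pass repo.ui):

    # before: readurltoken reached through the repo for its ui
    url, token = readurltoken(repo)       # internally repo.ui.config(b'phabricator', b'url')
    # after: callers hand the ui in themselves
    url, token = readurltoken(repo.ui)    # internally ui.config(b'phabricator', b'url')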
@@ -1,1066 +1,1066 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 # be specifying the version(s) of Mercurial they are tested with, or
80 # be specifying the version(s) of Mercurial they are tested with, or
81 # leave the attribute unspecified.
81 # leave the attribute unspecified.
82 testedwith = 'ships-with-hg-core'
82 testedwith = 'ships-with-hg-core'
83
83
84 cmdtable = {}
84 cmdtable = {}
85 command = registrar.command(cmdtable)
85 command = registrar.command(cmdtable)
86
86
87 configtable = {}
87 configtable = {}
88 configitem = registrar.configitem(configtable)
88 configitem = registrar.configitem(configtable)
89
89
90 # developer config: phabricator.batchsize
90 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
91 configitem(b'phabricator', b'batchsize',
92 default=12,
92 default=12,
93 )
93 )
94 configitem(b'phabricator', b'callsign',
94 configitem(b'phabricator', b'callsign',
95 default=None,
95 default=None,
96 )
96 )
97 configitem(b'phabricator', b'curlcmd',
97 configitem(b'phabricator', b'curlcmd',
98 default=None,
98 default=None,
99 )
99 )
100 # developer config: phabricator.repophid
100 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
101 configitem(b'phabricator', b'repophid',
102 default=None,
102 default=None,
103 )
103 )
104 configitem(b'phabricator', b'url',
104 configitem(b'phabricator', b'url',
105 default=None,
105 default=None,
106 )
106 )
107 configitem(b'phabsend', b'confirm',
107 configitem(b'phabsend', b'confirm',
108 default=False,
108 default=False,
109 )
109 )
110
110
111 colortable = {
111 colortable = {
112 b'phabricator.action.created': b'green',
112 b'phabricator.action.created': b'green',
113 b'phabricator.action.skipped': b'magenta',
113 b'phabricator.action.skipped': b'magenta',
114 b'phabricator.action.updated': b'magenta',
114 b'phabricator.action.updated': b'magenta',
115 b'phabricator.desc': b'',
115 b'phabricator.desc': b'',
116 b'phabricator.drev': b'bold',
116 b'phabricator.drev': b'bold',
117 b'phabricator.node': b'',
117 b'phabricator.node': b'',
118 }
118 }
119
119
120 _VCR_FLAGS = [
120 _VCR_FLAGS = [
121 (b'', b'test-vcr', b'',
121 (b'', b'test-vcr', b'',
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
122 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 b', otherwise will mock all http requests using the specified vcr file.'
123 b', otherwise will mock all http requests using the specified vcr file.'
124 b' (ADVANCED)'
124 b' (ADVANCED)'
125 )),
125 )),
126 ]
126 ]
127
127
128 def vcrcommand(name, flags, spec, helpcategory=None):
128 def vcrcommand(name, flags, spec, helpcategory=None):
129 fullflags = flags + _VCR_FLAGS
129 fullflags = flags + _VCR_FLAGS
130 def hgmatcher(r1, r2):
130 def hgmatcher(r1, r2):
131 if r1.uri != r2.uri or r1.method != r2.method:
131 if r1.uri != r2.uri or r1.method != r2.method:
132 return False
132 return False
133 r1params = r1.body.split(b'&')
133 r1params = r1.body.split(b'&')
134 r2params = r2.body.split(b'&')
134 r2params = r2.body.split(b'&')
135 return set(r1params) == set(r2params)
135 return set(r1params) == set(r2params)
136
136
137 def decorate(fn):
137 def decorate(fn):
138 def inner(*args, **kwargs):
138 def inner(*args, **kwargs):
139 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
139 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
140 if cassette:
140 if cassette:
141 import hgdemandimport
141 import hgdemandimport
142 with hgdemandimport.deactivated():
142 with hgdemandimport.deactivated():
143 import vcr as vcrmod
143 import vcr as vcrmod
144 import vcr.stubs as stubs
144 import vcr.stubs as stubs
145 vcr = vcrmod.VCR(
145 vcr = vcrmod.VCR(
146 serializer=r'json',
146 serializer=r'json',
147 custom_patches=[
147 custom_patches=[
148 (urlmod, r'httpconnection',
148 (urlmod, r'httpconnection',
149 stubs.VCRHTTPConnection),
149 stubs.VCRHTTPConnection),
150 (urlmod, r'httpsconnection',
150 (urlmod, r'httpsconnection',
151 stubs.VCRHTTPSConnection),
151 stubs.VCRHTTPSConnection),
152 ])
152 ])
153 vcr.register_matcher(r'hgmatcher', hgmatcher)
153 vcr.register_matcher(r'hgmatcher', hgmatcher)
154 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
154 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
155 return fn(*args, **kwargs)
155 return fn(*args, **kwargs)
156 return fn(*args, **kwargs)
156 return fn(*args, **kwargs)
157 inner.__name__ = fn.__name__
157 inner.__name__ = fn.__name__
158 inner.__doc__ = fn.__doc__
158 inner.__doc__ = fn.__doc__
159 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
159 return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
160 return decorate
160 return decorate
161
161
162 def urlencodenested(params):
162 def urlencodenested(params):
163 """like urlencode, but works with nested parameters.
163 """like urlencode, but works with nested parameters.
164
164
165 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
165 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
166 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
166 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
167 urlencode. Note: the encoding is consistent with PHP's http_build_query.
167 urlencode. Note: the encoding is consistent with PHP's http_build_query.
168 """
168 """
169 flatparams = util.sortdict()
169 flatparams = util.sortdict()
170 def process(prefix, obj):
170 def process(prefix, obj):
171 if isinstance(obj, bool):
171 if isinstance(obj, bool):
172 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
172 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
173 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
173 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
174 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
174 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
175 if items is None:
175 if items is None:
176 flatparams[prefix] = obj
176 flatparams[prefix] = obj
177 else:
177 else:
178 for k, v in items(obj):
178 for k, v in items(obj):
179 if prefix:
179 if prefix:
180 process(b'%s[%s]' % (prefix, k), v)
180 process(b'%s[%s]' % (prefix, k), v)
181 else:
181 else:
182 process(k, v)
182 process(k, v)
183 process(b'', params)
183 process(b'', params)
184 return util.urlreq.urlencode(flatparams)
184 return util.urlreq.urlencode(flatparams)
185
185
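# A standalone sketch of the flattening described in urlencodenested() above,
# assuming plain str/dict and the stdlib urllib instead of Mercurial's
# bytes-based util.sortdict/util.urlreq helpers (the bool case is omitted):
from urllib.parse import urlencode

def flatten(params, prefix='', out=None):
    """Flatten nested lists/dicts into PHP-style bracketed keys."""
    out = {} if out is None else out
    if isinstance(params, list):
        items = [('%d' % i, v) for i, v in enumerate(params)]
    elif isinstance(params, dict):
        items = params.items()
    else:
        out[prefix] = params
        return out
    for k, v in items:
        flatten(v, '%s[%s]' % (prefix, k) if prefix else k, out)
    return out

# {'a': ['b', 'c'], 'd': {'e': 'f'}} -> 'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'
print(urlencode(flatten({'a': ['b', 'c'], 'd': {'e': 'f'}})))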
186 def readurltoken(repo):
186 def readurltoken(ui):
187 """return conduit url, token and make sure they exist
187 """return conduit url, token and make sure they exist
188
188
189 Currently read from [auth] config section. In the future, it might
189 Currently read from [auth] config section. In the future, it might
190 make sense to read from .arcconfig and .arcrc as well.
190 make sense to read from .arcconfig and .arcrc as well.
191 """
191 """
192 url = repo.ui.config(b'phabricator', b'url')
192 url = ui.config(b'phabricator', b'url')
193 if not url:
193 if not url:
194 raise error.Abort(_(b'config %s.%s is required')
194 raise error.Abort(_(b'config %s.%s is required')
195 % (b'phabricator', b'url'))
195 % (b'phabricator', b'url'))
196
196
197 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
197 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
198 token = None
198 token = None
199
199
200 if res:
200 if res:
201 group, auth = res
201 group, auth = res
202
202
203 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
203 ui.debug(b"using auth.%s.* for authentication\n" % group)
204
204
205 token = auth.get(b'phabtoken')
205 token = auth.get(b'phabtoken')
206
206
207 if not token:
207 if not token:
208 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
208 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
209 % (url,))
209 % (url,))
210
210
211 return url, token
211 return url, token
212
212
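# With the sample configuration from the module docstring (a [phabricator] url
# plus an [auth] block whose prefix matches it and carries example.phabtoken),
# readurltoken(ui) would return roughly:
#     (b'https://phab.example.com/', b'cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx')
# and it aborts if either the url or a matching phabtoken cannot be found.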
213 def callconduit(repo, name, params):
213 def callconduit(repo, name, params):
214 """call Conduit API, params is a dict. return json.loads result, or None"""
214 """call Conduit API, params is a dict. return json.loads result, or None"""
215 host, token = readurltoken(repo)
215 host, token = readurltoken(repo.ui)
216 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
216 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
217 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
217 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
218 params = params.copy()
218 params = params.copy()
219 params[b'api.token'] = token
219 params[b'api.token'] = token
220 data = urlencodenested(params)
220 data = urlencodenested(params)
221 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
221 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
222 if curlcmd:
222 if curlcmd:
223 sin, sout = procutil.popen2(b'%s -d @- %s'
223 sin, sout = procutil.popen2(b'%s -d @- %s'
224 % (curlcmd, procutil.shellquote(url)))
224 % (curlcmd, procutil.shellquote(url)))
225 sin.write(data)
225 sin.write(data)
226 sin.close()
226 sin.close()
227 body = sout.read()
227 body = sout.read()
228 else:
228 else:
229 urlopener = urlmod.opener(repo.ui, authinfo)
229 urlopener = urlmod.opener(repo.ui, authinfo)
230 request = util.urlreq.request(pycompat.strurl(url), data=data)
230 request = util.urlreq.request(pycompat.strurl(url), data=data)
231 with contextlib.closing(urlopener.open(request)) as rsp:
231 with contextlib.closing(urlopener.open(request)) as rsp:
232 body = rsp.read()
232 body = rsp.read()
233 repo.ui.debug(b'Conduit Response: %s\n' % body)
233 repo.ui.debug(b'Conduit Response: %s\n' % body)
234 parsed = pycompat.rapply(
234 parsed = pycompat.rapply(
235 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
235 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
236 else x,
236 else x,
237 json.loads(body)
237 json.loads(body)
238 )
238 )
239 if parsed.get(b'error_code'):
239 if parsed.get(b'error_code'):
240 msg = (_(b'Conduit Error (%s): %s')
240 msg = (_(b'Conduit Error (%s): %s')
241 % (parsed[b'error_code'], parsed[b'error_info']))
241 % (parsed[b'error_code'], parsed[b'error_info']))
242 raise error.Abort(msg)
242 raise error.Abort(msg)
243 return parsed[b'result']
243 return parsed[b'result']
244
244
245 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
245 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
246 def debugcallconduit(ui, repo, name):
246 def debugcallconduit(ui, repo, name):
247 """call Conduit API
247 """call Conduit API
248
248
249 Call parameters are read from stdin as a JSON blob. Result will be written
249 Call parameters are read from stdin as a JSON blob. Result will be written
250 to stdout as a JSON blob.
250 to stdout as a JSON blob.
251 """
251 """
252 # json.loads only accepts bytes from 3.6+
252 # json.loads only accepts bytes from 3.6+
253 rawparams = encoding.unifromlocal(ui.fin.read())
253 rawparams = encoding.unifromlocal(ui.fin.read())
254 # json.loads only returns unicode strings
254 # json.loads only returns unicode strings
255 params = pycompat.rapply(lambda x:
255 params = pycompat.rapply(lambda x:
256 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
256 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
257 json.loads(rawparams)
257 json.loads(rawparams)
258 )
258 )
259 # json.dumps only accepts unicode strings
259 # json.dumps only accepts unicode strings
260 result = pycompat.rapply(lambda x:
260 result = pycompat.rapply(lambda x:
261 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
261 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
262 callconduit(repo, name, params)
262 callconduit(repo, name, params)
263 )
263 )
264 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
264 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
265 ui.write(b'%s\n' % encoding.unitolocal(s))
265 ui.write(b'%s\n' % encoding.unitolocal(s))
266
266
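# A hypothetical command-line use of debugcallconduit, assuming phabricator.url
# and an [auth] token are configured; the method and constraint mirror the
# user.search call in userphids() below, and the username "alice" is made up:
#
#   $ echo '{"constraints": {"usernames": ["alice"]}}' | hg debugcallconduit user.search
#
# The Conduit result is written to stdout as sorted, indented JSON.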
267 def getrepophid(repo):
267 def getrepophid(repo):
268 """given callsign, return repository PHID or None"""
268 """given callsign, return repository PHID or None"""
269 # developer config: phabricator.repophid
269 # developer config: phabricator.repophid
270 repophid = repo.ui.config(b'phabricator', b'repophid')
270 repophid = repo.ui.config(b'phabricator', b'repophid')
271 if repophid:
271 if repophid:
272 return repophid
272 return repophid
273 callsign = repo.ui.config(b'phabricator', b'callsign')
273 callsign = repo.ui.config(b'phabricator', b'callsign')
274 if not callsign:
274 if not callsign:
275 return None
275 return None
276 query = callconduit(repo, b'diffusion.repository.search',
276 query = callconduit(repo, b'diffusion.repository.search',
277 {b'constraints': {b'callsigns': [callsign]}})
277 {b'constraints': {b'callsigns': [callsign]}})
278 if len(query[b'data']) == 0:
278 if len(query[b'data']) == 0:
279 return None
279 return None
280 repophid = query[b'data'][0][b'phid']
280 repophid = query[b'data'][0][b'phid']
281 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
281 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
282 return repophid
282 return repophid
283
283
284 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
284 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
285 _differentialrevisiondescre = re.compile(
285 _differentialrevisiondescre = re.compile(
286 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
286 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
287
287
288 def getoldnodedrevmap(repo, nodelist):
288 def getoldnodedrevmap(repo, nodelist):
289 """find previous nodes that has been sent to Phabricator
289 """find previous nodes that has been sent to Phabricator
290
290
291 return {node: (oldnode, Differential diff, Differential Revision ID)}
291 return {node: (oldnode, Differential diff, Differential Revision ID)}
292 for node in nodelist with known previous sent versions, or associated
292 for node in nodelist with known previous sent versions, or associated
293 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
293 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
294 be ``None``.
294 be ``None``.
295
295
296 Examines commit messages like "Differential Revision:" to get the
296 Examines commit messages like "Differential Revision:" to get the
297 association information.
297 association information.
298
298
299 If such commit message line is not found, examines all precursors and their
299 If such commit message line is not found, examines all precursors and their
300 tags. Tags with format like "D1234" are considered a match and the node
300 tags. Tags with format like "D1234" are considered a match and the node
301 with that tag, and the number after "D" (ex. 1234) will be returned.
301 with that tag, and the number after "D" (ex. 1234) will be returned.
302
302
303 The ``old node``, if not None, is guaranteed to be the last diff of
303 The ``old node``, if not None, is guaranteed to be the last diff of
304 corresponding Differential Revision, and exist in the repo.
304 corresponding Differential Revision, and exist in the repo.
305 """
305 """
306 unfi = repo.unfiltered()
306 unfi = repo.unfiltered()
307 nodemap = unfi.changelog.nodemap
307 nodemap = unfi.changelog.nodemap
308
308
309 result = {} # {node: (oldnode?, lastdiff?, drev)}
309 result = {} # {node: (oldnode?, lastdiff?, drev)}
310 toconfirm = {} # {node: (force, {precnode}, drev)}
310 toconfirm = {} # {node: (force, {precnode}, drev)}
311 for node in nodelist:
311 for node in nodelist:
312 ctx = unfi[node]
312 ctx = unfi[node]
313 # For tags like "D123", put them into "toconfirm" to verify later
313 # For tags like "D123", put them into "toconfirm" to verify later
314 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
314 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
315 for n in precnodes:
315 for n in precnodes:
316 if n in nodemap:
316 if n in nodemap:
317 for tag in unfi.nodetags(n):
317 for tag in unfi.nodetags(n):
318 m = _differentialrevisiontagre.match(tag)
318 m = _differentialrevisiontagre.match(tag)
319 if m:
319 if m:
320 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
320 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
321 continue
321 continue
322
322
323 # Check commit message
323 # Check commit message
324 m = _differentialrevisiondescre.search(ctx.description())
324 m = _differentialrevisiondescre.search(ctx.description())
325 if m:
325 if m:
326 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
326 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
327
327
328 # Double check if tags are genuine by collecting all old nodes from
328 # Double check if tags are genuine by collecting all old nodes from
329 # Phabricator, and expect precursors overlap with it.
329 # Phabricator, and expect precursors overlap with it.
330 if toconfirm:
330 if toconfirm:
331 drevs = [drev for force, precs, drev in toconfirm.values()]
331 drevs = [drev for force, precs, drev in toconfirm.values()]
332 alldiffs = callconduit(unfi, b'differential.querydiffs',
332 alldiffs = callconduit(unfi, b'differential.querydiffs',
333 {b'revisionIDs': drevs})
333 {b'revisionIDs': drevs})
334 getnode = lambda d: bin(
334 getnode = lambda d: bin(
335 getdiffmeta(d).get(b'node', b'')) or None
335 getdiffmeta(d).get(b'node', b'')) or None
336 for newnode, (force, precset, drev) in toconfirm.items():
336 for newnode, (force, precset, drev) in toconfirm.items():
337 diffs = [d for d in alldiffs.values()
337 diffs = [d for d in alldiffs.values()
338 if int(d[b'revisionID']) == drev]
338 if int(d[b'revisionID']) == drev]
339
339
340 # "precursors" as known by Phabricator
340 # "precursors" as known by Phabricator
341 phprecset = set(getnode(d) for d in diffs)
341 phprecset = set(getnode(d) for d in diffs)
342
342
343 # Ignore if precursors (Phabricator and local repo) do not overlap,
343 # Ignore if precursors (Phabricator and local repo) do not overlap,
344 # and force is not set (when commit message says nothing)
344 # and force is not set (when commit message says nothing)
345 if not force and not bool(phprecset & precset):
345 if not force and not bool(phprecset & precset):
346 tagname = b'D%d' % drev
346 tagname = b'D%d' % drev
347 tags.tag(repo, tagname, nullid, message=None, user=None,
347 tags.tag(repo, tagname, nullid, message=None, user=None,
348 date=None, local=True)
348 date=None, local=True)
349 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
349 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
350 b'Differential history\n') % drev)
350 b'Differential history\n') % drev)
351 continue
351 continue
352
352
353 # Find the last node using Phabricator metadata, and make sure it
353 # Find the last node using Phabricator metadata, and make sure it
354 # exists in the repo
354 # exists in the repo
355 oldnode = lastdiff = None
355 oldnode = lastdiff = None
356 if diffs:
356 if diffs:
357 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
357 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
358 oldnode = getnode(lastdiff)
358 oldnode = getnode(lastdiff)
359 if oldnode and oldnode not in nodemap:
359 if oldnode and oldnode not in nodemap:
360 oldnode = None
360 oldnode = None
361
361
362 result[newnode] = (oldnode, lastdiff, drev)
362 result[newnode] = (oldnode, lastdiff, drev)
363
363
364 return result
364 return result
365
365
366 def getdiff(ctx, diffopts):
366 def getdiff(ctx, diffopts):
367 """plain-text diff without header (user, commit message, etc)"""
367 """plain-text diff without header (user, commit message, etc)"""
368 output = util.stringio()
368 output = util.stringio()
369 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
369 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
370 None, opts=diffopts):
370 None, opts=diffopts):
371 output.write(chunk)
371 output.write(chunk)
372 return output.getvalue()
372 return output.getvalue()
373
373
374 def creatediff(ctx):
374 def creatediff(ctx):
375 """create a Differential Diff"""
375 """create a Differential Diff"""
376 repo = ctx.repo()
376 repo = ctx.repo()
377 repophid = getrepophid(repo)
377 repophid = getrepophid(repo)
378 # Create a "Differential Diff" via "differential.createrawdiff" API
378 # Create a "Differential Diff" via "differential.createrawdiff" API
379 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
379 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
380 if repophid:
380 if repophid:
381 params[b'repositoryPHID'] = repophid
381 params[b'repositoryPHID'] = repophid
382 diff = callconduit(repo, b'differential.createrawdiff', params)
382 diff = callconduit(repo, b'differential.createrawdiff', params)
383 if not diff:
383 if not diff:
384 raise error.Abort(_(b'cannot create diff for %s') % ctx)
384 raise error.Abort(_(b'cannot create diff for %s') % ctx)
385 return diff
385 return diff
386
386
387 def writediffproperties(ctx, diff):
387 def writediffproperties(ctx, diff):
388 """write metadata to diff so patches could be applied losslessly"""
388 """write metadata to diff so patches could be applied losslessly"""
389 params = {
389 params = {
390 b'diff_id': diff[b'id'],
390 b'diff_id': diff[b'id'],
391 b'name': b'hg:meta',
391 b'name': b'hg:meta',
392 b'data': templatefilters.json({
392 b'data': templatefilters.json({
393 b'user': ctx.user(),
393 b'user': ctx.user(),
394 b'date': b'%d %d' % ctx.date(),
394 b'date': b'%d %d' % ctx.date(),
395 b'branch': ctx.branch(),
395 b'branch': ctx.branch(),
396 b'node': ctx.hex(),
396 b'node': ctx.hex(),
397 b'parent': ctx.p1().hex(),
397 b'parent': ctx.p1().hex(),
398 }),
398 }),
399 }
399 }
400 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
400 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
401
401
402 params = {
402 params = {
403 b'diff_id': diff[b'id'],
403 b'diff_id': diff[b'id'],
404 b'name': b'local:commits',
404 b'name': b'local:commits',
405 b'data': templatefilters.json({
405 b'data': templatefilters.json({
406 ctx.hex(): {
406 ctx.hex(): {
407 b'author': stringutil.person(ctx.user()),
407 b'author': stringutil.person(ctx.user()),
408 b'authorEmail': stringutil.email(ctx.user()),
408 b'authorEmail': stringutil.email(ctx.user()),
409 b'time': int(ctx.date()[0]),
409 b'time': int(ctx.date()[0]),
410 b'commit': ctx.hex(),
410 b'commit': ctx.hex(),
411 b'parents': [ctx.p1().hex()],
411 b'parents': [ctx.p1().hex()],
412 b'branch': ctx.branch(),
412 b'branch': ctx.branch(),
413 },
413 },
414 }),
414 }),
415 }
415 }
416 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
416 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
417
417
418 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
418 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
419 olddiff=None, actions=None, comment=None):
419 olddiff=None, actions=None, comment=None):
420 """create or update a Differential Revision
420 """create or update a Differential Revision
421
421
422 If revid is None, create a new Differential Revision, otherwise update
422 If revid is None, create a new Differential Revision, otherwise update
423 revid. If parentrevid is not None, set it as a dependency.
423 revid. If parentrevid is not None, set it as a dependency.
424
424
425 If oldnode is not None, check if the patch content (without commit message
425 If oldnode is not None, check if the patch content (without commit message
426 and metadata) has changed before creating another diff.
426 and metadata) has changed before creating another diff.
427
427
428 If actions is not None, they will be appended to the transaction.
428 If actions is not None, they will be appended to the transaction.
429 """
429 """
430 repo = ctx.repo()
430 repo = ctx.repo()
431 if oldnode:
431 if oldnode:
432 diffopts = mdiff.diffopts(git=True, context=32767)
432 diffopts = mdiff.diffopts(git=True, context=32767)
433 oldctx = repo.unfiltered()[oldnode]
433 oldctx = repo.unfiltered()[oldnode]
434 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
434 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
435 else:
435 else:
436 neednewdiff = True
436 neednewdiff = True
437
437
438 transactions = []
438 transactions = []
439 if neednewdiff:
439 if neednewdiff:
440 diff = creatediff(ctx)
440 diff = creatediff(ctx)
441 transactions.append({b'type': b'update', b'value': diff[b'phid']})
441 transactions.append({b'type': b'update', b'value': diff[b'phid']})
442 if comment:
442 if comment:
443 transactions.append({b'type': b'comment', b'value': comment})
443 transactions.append({b'type': b'comment', b'value': comment})
444 else:
444 else:
445 # Even if we don't need to upload a new diff because the patch content
445 # Even if we don't need to upload a new diff because the patch content
446 # does not change. We might still need to update its metadata so
446 # does not change. We might still need to update its metadata so
447 # pushers could know the correct node metadata.
447 # pushers could know the correct node metadata.
448 assert olddiff
448 assert olddiff
449 diff = olddiff
449 diff = olddiff
450 writediffproperties(ctx, diff)
450 writediffproperties(ctx, diff)
451
451
452 # Use a temporary summary to set dependency. There might be better ways but
452 # Use a temporary summary to set dependency. There might be better ways but
453 # I cannot find them for now. But do not do that if we are updating an
453 # I cannot find them for now. But do not do that if we are updating an
454 # existing revision (revid is not None) since that introduces visible
454 # existing revision (revid is not None) since that introduces visible
455 # churns (someone edited "Summary" twice) on the web page.
455 # churns (someone edited "Summary" twice) on the web page.
456 if parentrevid and revid is None:
456 if parentrevid and revid is None:
457 summary = b'Depends on D%d' % parentrevid
457 summary = b'Depends on D%d' % parentrevid
458 transactions += [{b'type': b'summary', b'value': summary},
458 transactions += [{b'type': b'summary', b'value': summary},
459 {b'type': b'summary', b'value': b' '}]
459 {b'type': b'summary', b'value': b' '}]
460
460
461 if actions:
461 if actions:
462 transactions += actions
462 transactions += actions
463
463
464 # Parse commit message and update related fields.
464 # Parse commit message and update related fields.
465 desc = ctx.description()
465 desc = ctx.description()
466 info = callconduit(repo, b'differential.parsecommitmessage',
466 info = callconduit(repo, b'differential.parsecommitmessage',
467 {b'corpus': desc})
467 {b'corpus': desc})
468 for k, v in info[b'fields'].items():
468 for k, v in info[b'fields'].items():
469 if k in [b'title', b'summary', b'testPlan']:
469 if k in [b'title', b'summary', b'testPlan']:
470 transactions.append({b'type': k, b'value': v})
470 transactions.append({b'type': k, b'value': v})
471
471
472 params = {b'transactions': transactions}
472 params = {b'transactions': transactions}
473 if revid is not None:
473 if revid is not None:
474 # Update an existing Differential Revision
474 # Update an existing Differential Revision
475 params[b'objectIdentifier'] = revid
475 params[b'objectIdentifier'] = revid
476
476
477 revision = callconduit(repo, b'differential.revision.edit', params)
477 revision = callconduit(repo, b'differential.revision.edit', params)
478 if not revision:
478 if not revision:
479 raise error.Abort(_(b'cannot create revision for %s') % ctx)
479 raise error.Abort(_(b'cannot create revision for %s') % ctx)
480
480
481 return revision, diff
481 return revision, diff
482
482
483 def userphids(repo, names):
483 def userphids(repo, names):
484 """convert user names to PHIDs"""
484 """convert user names to PHIDs"""
485 names = [name.lower() for name in names]
485 names = [name.lower() for name in names]
486 query = {b'constraints': {b'usernames': names}}
486 query = {b'constraints': {b'usernames': names}}
487 result = callconduit(repo, b'user.search', query)
487 result = callconduit(repo, b'user.search', query)
488 # username not found is not an error of the API. So check if we have missed
488 # username not found is not an error of the API. So check if we have missed
489 # some names here.
489 # some names here.
490 data = result[b'data']
490 data = result[b'data']
491 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
491 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
492 unresolved = set(names) - resolved
492 unresolved = set(names) - resolved
493 if unresolved:
493 if unresolved:
494 raise error.Abort(_(b'unknown username: %s')
494 raise error.Abort(_(b'unknown username: %s')
495 % b' '.join(sorted(unresolved)))
495 % b' '.join(sorted(unresolved)))
496 return [entry[b'phid'] for entry in data]
496 return [entry[b'phid'] for entry in data]
497
497
498 @vcrcommand(b'phabsend',
498 @vcrcommand(b'phabsend',
499 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
499 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
500 (b'', b'amend', True, _(b'update commit messages')),
500 (b'', b'amend', True, _(b'update commit messages')),
501 (b'', b'reviewer', [], _(b'specify reviewers')),
501 (b'', b'reviewer', [], _(b'specify reviewers')),
502 (b'm', b'comment', b'',
502 (b'm', b'comment', b'',
503 _(b'add a comment to Revisions with new/updated Diffs')),
503 _(b'add a comment to Revisions with new/updated Diffs')),
504 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
504 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
505 _(b'REV [OPTIONS]'),
505 _(b'REV [OPTIONS]'),
506 helpcategory=command.CATEGORY_IMPORT_EXPORT)
506 helpcategory=command.CATEGORY_IMPORT_EXPORT)
507 def phabsend(ui, repo, *revs, **opts):
507 def phabsend(ui, repo, *revs, **opts):
508 """upload changesets to Phabricator
508 """upload changesets to Phabricator
509
509
510 If there are multiple revisions specified, they will be send as a stack
510 If there are multiple revisions specified, they will be send as a stack
511 with a linear dependencies relationship using the order specified by the
511 with a linear dependencies relationship using the order specified by the
512 revset.
512 revset.
513
513
514 For the first time uploading changesets, local tags will be created to
514 For the first time uploading changesets, local tags will be created to
515 maintain the association. After the first time, phabsend will check
515 maintain the association. After the first time, phabsend will check
516 obsstore and tags information so it can figure out whether to update an
516 obsstore and tags information so it can figure out whether to update an
517 existing Differential Revision, or create a new one.
517 existing Differential Revision, or create a new one.
518
518
519 If --amend is set, update commit messages so they have the
519 If --amend is set, update commit messages so they have the
520 ``Differential Revision`` URL, remove related tags. This is similar to what
520 ``Differential Revision`` URL, remove related tags. This is similar to what
521 arcanist will do, and is more desired in author-push workflows. Otherwise,
521 arcanist will do, and is more desired in author-push workflows. Otherwise,
522 use local tags to record the ``Differential Revision`` association.
522 use local tags to record the ``Differential Revision`` association.
523
523
524 The --confirm option lets you confirm changesets before sending them. You
524 The --confirm option lets you confirm changesets before sending them. You
525 can also add following to your configuration file to make it default
525 can also add following to your configuration file to make it default
526 behaviour::
526 behaviour::
527
527
528 [phabsend]
528 [phabsend]
529 confirm = true
529 confirm = true
530
530
531 phabsend will check obsstore and the above association to decide whether to
531 phabsend will check obsstore and the above association to decide whether to
532 update an existing Differential Revision, or create a new one.
532 update an existing Differential Revision, or create a new one.
533 """
533 """
534 opts = pycompat.byteskwargs(opts)
534 opts = pycompat.byteskwargs(opts)
535 revs = list(revs) + opts.get(b'rev', [])
535 revs = list(revs) + opts.get(b'rev', [])
536 revs = scmutil.revrange(repo, revs)
536 revs = scmutil.revrange(repo, revs)
537
537
538 if not revs:
538 if not revs:
539 raise error.Abort(_(b'phabsend requires at least one changeset'))
539 raise error.Abort(_(b'phabsend requires at least one changeset'))
540 if opts.get(b'amend'):
540 if opts.get(b'amend'):
541 cmdutil.checkunfinished(repo)
541 cmdutil.checkunfinished(repo)
542
542
543 # {newnode: (oldnode, olddiff, olddrev}
543 # {newnode: (oldnode, olddiff, olddrev}
544 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
544 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
545
545
546 confirm = ui.configbool(b'phabsend', b'confirm')
546 confirm = ui.configbool(b'phabsend', b'confirm')
547 confirm |= bool(opts.get(b'confirm'))
547 confirm |= bool(opts.get(b'confirm'))
548 if confirm:
548 if confirm:
549 confirmed = _confirmbeforesend(repo, revs, oldmap)
549 confirmed = _confirmbeforesend(repo, revs, oldmap)
550 if not confirmed:
550 if not confirmed:
551 raise error.Abort(_(b'phabsend cancelled'))
551 raise error.Abort(_(b'phabsend cancelled'))
552
552
553 actions = []
553 actions = []
554 reviewers = opts.get(b'reviewer', [])
554 reviewers = opts.get(b'reviewer', [])
555 if reviewers:
555 if reviewers:
556 phids = userphids(repo, reviewers)
556 phids = userphids(repo, reviewers)
557 actions.append({b'type': b'reviewers.add', b'value': phids})
557 actions.append({b'type': b'reviewers.add', b'value': phids})
558
558
559 drevids = [] # [int]
559 drevids = [] # [int]
560 diffmap = {} # {newnode: diff}
560 diffmap = {} # {newnode: diff}
561
561
562 # Send patches one by one so we know their Differential Revision IDs and
562 # Send patches one by one so we know their Differential Revision IDs and
563 # can provide dependency relationship
563 # can provide dependency relationship
564 lastrevid = None
564 lastrevid = None
565 for rev in revs:
565 for rev in revs:
566 ui.debug(b'sending rev %d\n' % rev)
566 ui.debug(b'sending rev %d\n' % rev)
567 ctx = repo[rev]
567 ctx = repo[rev]
568
568
569 # Get Differential Revision ID
569 # Get Differential Revision ID
570 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
570 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
571 if oldnode != ctx.node() or opts.get(b'amend'):
571 if oldnode != ctx.node() or opts.get(b'amend'):
572 # Create or update Differential Revision
572 # Create or update Differential Revision
573 revision, diff = createdifferentialrevision(
573 revision, diff = createdifferentialrevision(
574 ctx, revid, lastrevid, oldnode, olddiff, actions,
574 ctx, revid, lastrevid, oldnode, olddiff, actions,
575 opts.get(b'comment'))
575 opts.get(b'comment'))
576 diffmap[ctx.node()] = diff
576 diffmap[ctx.node()] = diff
577 newrevid = int(revision[b'object'][b'id'])
577 newrevid = int(revision[b'object'][b'id'])
578 if revid:
578 if revid:
579 action = b'updated'
579 action = b'updated'
580 else:
580 else:
581 action = b'created'
581 action = b'created'
582
582
583 # Create a local tag to note the association, if commit message
583 # Create a local tag to note the association, if commit message
584 # does not have it already
584 # does not have it already
585 m = _differentialrevisiondescre.search(ctx.description())
585 m = _differentialrevisiondescre.search(ctx.description())
586 if not m or int(m.group(r'id')) != newrevid:
586 if not m or int(m.group(r'id')) != newrevid:
587 tagname = b'D%d' % newrevid
587 tagname = b'D%d' % newrevid
588 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
588 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
589 date=None, local=True)
589 date=None, local=True)
590 else:
590 else:
591 # Nothing changed. But still set "newrevid" so the next revision
591 # Nothing changed. But still set "newrevid" so the next revision
592 # could depend on this one.
592 # could depend on this one.
593 newrevid = revid
593 newrevid = revid
594 action = b'skipped'
594 action = b'skipped'
595
595
596 actiondesc = ui.label(
596 actiondesc = ui.label(
597 {b'created': _(b'created'),
597 {b'created': _(b'created'),
598 b'skipped': _(b'skipped'),
598 b'skipped': _(b'skipped'),
599 b'updated': _(b'updated')}[action],
599 b'updated': _(b'updated')}[action],
600 b'phabricator.action.%s' % action)
600 b'phabricator.action.%s' % action)
601 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
601 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
602 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
602 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
603 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
603 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
604 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
604 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
605 desc))
605 desc))
606 drevids.append(newrevid)
606 drevids.append(newrevid)
607 lastrevid = newrevid
607 lastrevid = newrevid
608
608
609 # Update commit messages and remove tags
609 # Update commit messages and remove tags
610 if opts.get(b'amend'):
610 if opts.get(b'amend'):
611 unfi = repo.unfiltered()
611 unfi = repo.unfiltered()
612 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
612 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
613 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
613 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
614 wnode = unfi[b'.'].node()
614 wnode = unfi[b'.'].node()
615 mapping = {} # {oldnode: [newnode]}
615 mapping = {} # {oldnode: [newnode]}
616 for i, rev in enumerate(revs):
616 for i, rev in enumerate(revs):
617 old = unfi[rev]
617 old = unfi[rev]
618 drevid = drevids[i]
618 drevid = drevids[i]
619 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
619 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
620 newdesc = getdescfromdrev(drev)
620 newdesc = getdescfromdrev(drev)
621 # Make sure commit message contain "Differential Revision"
621 # Make sure commit message contain "Differential Revision"
622 if old.description() != newdesc:
622 if old.description() != newdesc:
623 if old.phase() == phases.public:
623 if old.phase() == phases.public:
624 ui.warn(_("warning: not updating public commit %s\n")
624 ui.warn(_("warning: not updating public commit %s\n")
625 % scmutil.formatchangeid(old))
625 % scmutil.formatchangeid(old))
626 continue
626 continue
627 parents = [
627 parents = [
628 mapping.get(old.p1().node(), (old.p1(),))[0],
628 mapping.get(old.p1().node(), (old.p1(),))[0],
629 mapping.get(old.p2().node(), (old.p2(),))[0],
629 mapping.get(old.p2().node(), (old.p2(),))[0],
630 ]
630 ]
631 new = context.metadataonlyctx(
631 new = context.metadataonlyctx(
632 repo, old, parents=parents, text=newdesc,
632 repo, old, parents=parents, text=newdesc,
633 user=old.user(), date=old.date(), extra=old.extra())
633 user=old.user(), date=old.date(), extra=old.extra())
634
634
635 newnode = new.commit()
635 newnode = new.commit()
636
636
637 mapping[old.node()] = [newnode]
637 mapping[old.node()] = [newnode]
638 # Update diff property
638 # Update diff property
639 writediffproperties(unfi[newnode], diffmap[old.node()])
639 writediffproperties(unfi[newnode], diffmap[old.node()])
640 # Remove local tags since it's no longer necessary
640 # Remove local tags since it's no longer necessary
641 tagname = b'D%d' % drevid
641 tagname = b'D%d' % drevid
642 if tagname in repo.tags():
642 if tagname in repo.tags():
643 tags.tag(repo, tagname, nullid, message=None, user=None,
643 tags.tag(repo, tagname, nullid, message=None, user=None,
644 date=None, local=True)
644 date=None, local=True)
645 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
645 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
646 if wnode in mapping:
646 if wnode in mapping:
647 unfi.setparents(mapping[wnode][0])
647 unfi.setparents(mapping[wnode][0])
648
648
649 # Map from "hg:meta" keys to header understood by "hg import". The order is
649 # Map from "hg:meta" keys to header understood by "hg import". The order is
650 # consistent with "hg export" output.
650 # consistent with "hg export" output.
651 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
651 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
652 (b'branch', b'Branch'), (b'node', b'Node ID'),
652 (b'branch', b'Branch'), (b'node', b'Node ID'),
653 (b'parent', b'Parent ')])
653 (b'parent', b'Parent ')])
654
654
655 def _confirmbeforesend(repo, revs, oldmap):
655 def _confirmbeforesend(repo, revs, oldmap):
656 url, token = readurltoken(repo)
656 url, token = readurltoken(repo.ui)
657 ui = repo.ui
657 ui = repo.ui
658 for rev in revs:
658 for rev in revs:
659 ctx = repo[rev]
659 ctx = repo[rev]
660 desc = ctx.description().splitlines()[0]
660 desc = ctx.description().splitlines()[0]
661 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
661 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
662 if drevid:
662 if drevid:
663 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
663 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
664 else:
664 else:
665 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
665 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
666
666
667 ui.write(_(b'%s - %s: %s\n')
667 ui.write(_(b'%s - %s: %s\n')
668 % (drevdesc,
668 % (drevdesc,
669 ui.label(bytes(ctx), b'phabricator.node'),
669 ui.label(bytes(ctx), b'phabricator.node'),
670 ui.label(desc, b'phabricator.desc')))
670 ui.label(desc, b'phabricator.desc')))
671
671
672 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
672 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
673 b'$$ &Yes $$ &No') % url):
673 b'$$ &Yes $$ &No') % url):
674 return False
674 return False
675
675
676 return True
676 return True
677
677
678 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
678 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
679 b'abandoned'}
679 b'abandoned'}
680
680
681 def _getstatusname(drev):
681 def _getstatusname(drev):
682 """get normalized status name from a Differential Revision"""
682 """get normalized status name from a Differential Revision"""
683 return drev[b'statusName'].replace(b' ', b'').lower()
683 return drev[b'statusName'].replace(b' ', b'').lower()
684
684
685 # Small language to specify differential revisions. Support symbols: (), :X,
685 # Small language to specify differential revisions. Support symbols: (), :X,
686 # +, and -.
686 # +, and -.
687
687
688 _elements = {
688 _elements = {
689 # token-type: binding-strength, primary, prefix, infix, suffix
689 # token-type: binding-strength, primary, prefix, infix, suffix
690 b'(': (12, None, (b'group', 1, b')'), None, None),
690 b'(': (12, None, (b'group', 1, b')'), None, None),
691 b':': (8, None, (b'ancestors', 8), None, None),
691 b':': (8, None, (b'ancestors', 8), None, None),
692 b'&': (5, None, None, (b'and_', 5), None),
692 b'&': (5, None, None, (b'and_', 5), None),
693 b'+': (4, None, None, (b'add', 4), None),
693 b'+': (4, None, None, (b'add', 4), None),
694 b'-': (4, None, None, (b'sub', 4), None),
694 b'-': (4, None, None, (b'sub', 4), None),
695 b')': (0, None, None, None, None),
695 b')': (0, None, None, None, None),
696 b'symbol': (0, b'symbol', None, None, None),
696 b'symbol': (0, b'symbol', None, None, None),
697 b'end': (0, None, None, None, None),
697 b'end': (0, None, None, None, None),
698 }
698 }
699
699
700 def _tokenize(text):
700 def _tokenize(text):
701 view = memoryview(text) # zero-copy slice
701 view = memoryview(text) # zero-copy slice
702 special = b'():+-& '
702 special = b'():+-& '
703 pos = 0
703 pos = 0
704 length = len(text)
704 length = len(text)
705 while pos < length:
705 while pos < length:
706 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
706 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
707 pycompat.iterbytestr(view[pos:])))
707 pycompat.iterbytestr(view[pos:])))
708 if symbol:
708 if symbol:
709 yield (b'symbol', symbol, pos)
709 yield (b'symbol', symbol, pos)
710 pos += len(symbol)
710 pos += len(symbol)
711 else: # special char, ignore space
711 else: # special char, ignore space
712 if text[pos] != b' ':
712 if text[pos] != b' ':
713 yield (text[pos], None, pos)
713 yield (text[pos], None, pos)
714 pos += 1
714 pos += 1
715 yield (b'end', None, pos)
715 yield (b'end', None, pos)
716
716
717 def _parse(text):
717 def _parse(text):
718 tree, pos = parser.parser(_elements).parse(_tokenize(text))
718 tree, pos = parser.parser(_elements).parse(_tokenize(text))
719 if pos != len(text):
719 if pos != len(text):
720 raise error.ParseError(b'invalid token', pos)
720 raise error.ParseError(b'invalid token', pos)
721 return tree
721 return tree
722
722
723 def _parsedrev(symbol):
723 def _parsedrev(symbol):
724 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
724 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
725 if symbol.startswith(b'D') and symbol[1:].isdigit():
725 if symbol.startswith(b'D') and symbol[1:].isdigit():
726 return int(symbol[1:])
726 return int(symbol[1:])
727 if symbol.isdigit():
727 if symbol.isdigit():
728 return int(symbol)
728 return int(symbol)
729
729
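# A few example specs in the small language above (the revision numbers are
# made up): b'D123' or b'123' selects a single Differential Revision,
# b':D123' selects its whole stack via the ancestors operator, b'D123+D124'
# is a union, b':D125-D123' a difference, and b'accepted & :D123' intersects
# a stack with one of the status names in _knownstatusnames.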
730 def _prefetchdrevs(tree):
730 def _prefetchdrevs(tree):
731 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
731 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
732 drevs = set()
732 drevs = set()
733 ancestordrevs = set()
733 ancestordrevs = set()
734 op = tree[0]
734 op = tree[0]
735 if op == b'symbol':
735 if op == b'symbol':
736 r = _parsedrev(tree[1])
736 r = _parsedrev(tree[1])
737 if r:
737 if r:
738 drevs.add(r)
738 drevs.add(r)
739 elif op == b'ancestors':
739 elif op == b'ancestors':
740 r, a = _prefetchdrevs(tree[1])
740 r, a = _prefetchdrevs(tree[1])
741 drevs.update(r)
741 drevs.update(r)
742 ancestordrevs.update(r)
742 ancestordrevs.update(r)
743 ancestordrevs.update(a)
743 ancestordrevs.update(a)
744 else:
744 else:
745 for t in tree[1:]:
745 for t in tree[1:]:
746 r, a = _prefetchdrevs(t)
746 r, a = _prefetchdrevs(t)
747 drevs.update(r)
747 drevs.update(r)
748 ancestordrevs.update(a)
748 ancestordrevs.update(a)
749 return drevs, ancestordrevs
749 return drevs, ancestordrevs
750
750
751 def querydrev(repo, spec):
751 def querydrev(repo, spec):
752 """return a list of "Differential Revision" dicts
752 """return a list of "Differential Revision" dicts
753
753
754 spec is a string using a simple query language, see docstring in phabread
754 spec is a string using a simple query language, see docstring in phabread
755 for details.
755 for details.
756
756
757 A "Differential Revision dict" looks like:
757 A "Differential Revision dict" looks like:
758
758
759 {
759 {
760 "id": "2",
760 "id": "2",
761 "phid": "PHID-DREV-672qvysjcczopag46qty",
761 "phid": "PHID-DREV-672qvysjcczopag46qty",
762 "title": "example",
762 "title": "example",
763 "uri": "https://phab.example.com/D2",
763 "uri": "https://phab.example.com/D2",
764 "dateCreated": "1499181406",
764 "dateCreated": "1499181406",
765 "dateModified": "1499182103",
765 "dateModified": "1499182103",
766 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
766 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
767 "status": "0",
767 "status": "0",
768 "statusName": "Needs Review",
768 "statusName": "Needs Review",
769 "properties": [],
769 "properties": [],
770 "branch": null,
770 "branch": null,
771 "summary": "",
771 "summary": "",
772 "testPlan": "",
772 "testPlan": "",
773 "lineCount": "2",
773 "lineCount": "2",
774 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
774 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
775 "diffs": [
775 "diffs": [
776 "3",
776 "3",
777 "4",
777 "4",
778 ],
778 ],
779 "commits": [],
779 "commits": [],
780 "reviewers": [],
780 "reviewers": [],
781 "ccs": [],
781 "ccs": [],
782 "hashes": [],
782 "hashes": [],
783 "auxiliary": {
783 "auxiliary": {
784 "phabricator:projects": [],
784 "phabricator:projects": [],
785 "phabricator:depends-on": [
785 "phabricator:depends-on": [
786 "PHID-DREV-gbapp366kutjebt7agcd"
786 "PHID-DREV-gbapp366kutjebt7agcd"
787 ]
787 ]
788 },
788 },
789 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
789 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
790 "sourcePath": null
790 "sourcePath": null
791 }
791 }
792 """
792 """
793 def fetch(params):
793 def fetch(params):
794 """params -> single drev or None"""
794 """params -> single drev or None"""
795 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
795 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
796 if key in prefetched:
796 if key in prefetched:
797 return prefetched[key]
797 return prefetched[key]
798 drevs = callconduit(repo, b'differential.query', params)
798 drevs = callconduit(repo, b'differential.query', params)
799 # Fill prefetched with the result
799 # Fill prefetched with the result
800 for drev in drevs:
800 for drev in drevs:
801 prefetched[drev[b'phid']] = drev
801 prefetched[drev[b'phid']] = drev
802 prefetched[int(drev[b'id'])] = drev
802 prefetched[int(drev[b'id'])] = drev
803 if key not in prefetched:
803 if key not in prefetched:
804 raise error.Abort(_(b'cannot get Differential Revision %r')
804 raise error.Abort(_(b'cannot get Differential Revision %r')
805 % params)
805 % params)
806 return prefetched[key]
806 return prefetched[key]
807
807
808 def getstack(topdrevids):
808 def getstack(topdrevids):
809 """given a top, get a stack from the bottom, [id] -> [id]"""
809 """given a top, get a stack from the bottom, [id] -> [id]"""
810 visited = set()
810 visited = set()
811 result = []
811 result = []
812 queue = [{b'ids': [i]} for i in topdrevids]
812 queue = [{b'ids': [i]} for i in topdrevids]
813 while queue:
813 while queue:
814 params = queue.pop()
814 params = queue.pop()
815 drev = fetch(params)
815 drev = fetch(params)
816 if drev[b'id'] in visited:
816 if drev[b'id'] in visited:
817 continue
817 continue
818 visited.add(drev[b'id'])
818 visited.add(drev[b'id'])
819 result.append(int(drev[b'id']))
819 result.append(int(drev[b'id']))
820 auxiliary = drev.get(b'auxiliary', {})
820 auxiliary = drev.get(b'auxiliary', {})
821 depends = auxiliary.get(b'phabricator:depends-on', [])
821 depends = auxiliary.get(b'phabricator:depends-on', [])
822 for phid in depends:
822 for phid in depends:
823 queue.append({b'phids': [phid]})
823 queue.append({b'phids': [phid]})
824 result.reverse()
824 result.reverse()
825 return smartset.baseset(result)
825 return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
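    # Illustration (assuming the default batchsize of 12, which may differ
    # in your configuration): an ancestor spec ending at D42 prefetches ids
    # 30..42 with a single differential.query call instead of one call per
    # revision.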

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)
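
    # The parser maps the infix operators of a DREVSPEC onto these nodes:
    # ``+`` becomes b'add' (union), ``-`` becomes b'sub' (difference) and
    # ``&`` becomes b'and_' (intersection); getattr(operator, op) then
    # dispatches to the corresponding smartset operation.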

    return [prefetched[r] for r in walk(tree)]

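# A minimal, self-contained sketch (hypothetical data; not used by this
# module) of the bottom-up traversal getstack() performs above: start from
# the top revision ids, follow the "phabricator:depends-on" edges, then
# reverse so that ancestors come first.
def _stackorderexample():
    depends = {3: [2], 2: [1], 1: []}  # hypothetical: D3 -> D2 -> D1
    visited, result, queue = set(), [], [3]
    while queue:
        drevid = queue.pop()
        if drevid in visited:
            continue
        visited.add(drevid)
        result.append(drevid)
        queue.extend(depends[drevid])
    result.reverse()
    return result  # [1, 2, 3]
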
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
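
# Shape of the message assembled above, for a hypothetical revision with all
# four fields present (empty fields are dropped by the filter(None, ...)):
#
#   <title>
#
#   <summary>
#
#   Test Plan:
#   <test plan>
#
#   Differential Revision: https://phab.example.com/D123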

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % commit[b'time']
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
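
# Worked example for the "local:commits" fallback above, using the sample
# commit from the docstring: the resulting meta would be roughly
#
#   {b'user':   b'Foo Bar <foo@example.com>',
#    b'date':   b'1499546314 0',   # time zone information is lost
#    b'branch': b'default',
#    b'node':   b'98c08acae292b2faf60a279b4189beb6cff1414d',
#    b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}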

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)

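# Shape of the patch emitted by readpatch() (illustrative; assuming
# _metanamemap maps the meta keys onto the usual "# User" / "# Date" /
# "# Branch" / "# Node ID" / "# Parent" headers understood by 'hg import'):
#
#   # HG changeset patch
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev()>
#
#   diff --git a/... b/...
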
@vcrcommand(b'phabread',
            [(b'', b'stack', False, _(b'read dependencies'))],
            _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

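# Example invocations of phabread (illustrative):
#
#   hg phabread D123                 # print one revision as a patch
#   hg phabread ':D6+8-(2+D4)'       # stack up to D6, plus D8, minus D2/D4
#   hg phabread --stack D42 > stack.patch
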
@vcrcommand(b'phabupdate',
            [(b'', b'accept', False, _(b'accept revisions')),
             (b'', b'reject', False, _(b'reject revisions')),
             (b'', b'abandon', False, _(b'abandon revisions')),
             (b'', b'reclaim', False, _(b'reclaim revisions')),
             (b'm', b'comment', b'', _(b'comment on the last revision')),
             ], _(b'DREVSPEC [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {b'objectIdentifier': drev[b'phid'],
                      b'transactions': actions}
            callconduit(repo, b'differential.revision.edit', params)

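# Example invocations of phabupdate (illustrative):
#
#   hg phabupdate --accept D123
#   hg phabupdate --abandon ':D9' -m 'superseded by a newer series'
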
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict({
            b'url': m.group(r'url'),
            b'id': b"D%s" % m.group(r'id'),
        })
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({
                    b'url': url,
                    b'id': t,
                })
    return None
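
# Example template usage (illustrative):
#
#   hg log -r . -T '{phabreview.url}\n'
#   hg log -r . -T '{phabreview.id}\n'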