##// END OF EJS Templates
phabricator: fallback to reading metadata from diff for phabread...
Ian Moody -
r42442:a4f7dceb default
parent child Browse files
Show More
@@ -1,1043 +1,1052 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 scmutil,
65 scmutil,
66 smartset,
66 smartset,
67 tags,
67 tags,
68 templatefilters,
68 templatefilters,
69 templateutil,
69 templateutil,
70 url as urlmod,
70 url as urlmod,
71 util,
71 util,
72 )
72 )
73 from mercurial.utils import (
73 from mercurial.utils import (
74 procutil,
74 procutil,
75 stringutil,
75 stringutil,
76 )
76 )
77
77
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# Command table populated by the (vcr)command decorators below.
cmdtable = {}
command = registrar.command(cmdtable)

# Config declarations for the [phabricator] and [phabsend] sections.
configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

# Color/effect labels used when writing phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every vcrcommand-wrapped command; enables HTTP
# record/replay for the test suite (see vcrcommand below).
_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
     )),
]
127
127
def vcrcommand(name, flags, spec, helpcategory=None):
    """Like ``command``, but adds the hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is passed, HTTP traffic made by the wrapped
    command is recorded to (or replayed from) the cassette file at PATH
    using the third-party ``vcr`` package.
    """
    allflags = flags + _VCR_FLAGS

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                # vcr is a test-only dependency; import it lazily, with
                # demandimport disabled so its own imports work normally.
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    patches = [
                        (urlmod, r'httpconnection',
                         stubs.VCRHTTPConnection),
                        (urlmod, r'httpsconnection',
                         stubs.VCRHTTPSConnection),
                    ]
                    vcr = vcrmod.VCR(serializer=r'json',
                                     custom_patches=patches)
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        # Preserve the wrapped function's identity for help/profiling output.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, allflags, spec, helpcategory=helpcategory)(inner)
    return decorate
153
153
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # PHP form encoding spells booleans as the strings true/false.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type match on purpose: subclasses of list/dict are treated
        # as opaque scalar values, matching the original dispatch table.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                _flatten(b'%s[%s]' % (prefix, key), item)
            else:
                _flatten(key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
177
177
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    ui = repo.ui
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    token = None
    # Find the [auth] group matching this URL, if any, and pull its token.
    match = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if match:
        group, auth = match
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token
204
204
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Work on a copy so the caller's dict is not polluted with the token.
    allparams = params.copy()
    allparams[b'api.token'] = token
    data = urlencodenested(allparams)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl command: feed the form body via stdin.
        sin, sout = procutil.popen2(b'%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Built-in HTTP stack, honoring [auth] credentials.
        opener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(opener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)

    def _tolocal(x):
        # json.loads yields unicode strings; convert them to local bytes.
        if isinstance(x, pycompat.unicode):
            return encoding.unitolocal(x)
        return x

    parsed = pycompat.rapply(_tolocal, json.loads(body))
    if parsed.get(b'error_code'):
        raise error.Abort(_(b'Conduit Error (%s): %s')
                          % (parsed[b'error_code'], parsed[b'error_info']))
    return parsed[b'result']
236
236
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def _tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def _tounicode(x):
        # json.dumps only accepts unicode strings
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    params = pycompat.rapply(_tolocal, json.loads(rawparams))
    result = pycompat.rapply(_tounicode, callconduit(repo, name, params))
    dumped = json.dumps(result, sort_keys=True, indent=2,
                        separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(dumped))
258
258
def getrepophid(repo):
    """given callsign, return repository PHID or None

    The result of the ``diffusion.repository.search`` lookup is cached in
    the ``phabricator.repophid`` config so subsequent calls in the same
    process avoid another round-trip.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        # Without a callsign there is nothing to search for.
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    # Idiomatic truthiness test instead of len(...) == 0; an empty result
    # means the callsign is unknown to the server.
    if not query[b'data']:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
275
275
# Matches a local tag of the exact form "D123" (no prefix/suffix).
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url ending in D123>" line in a commit
# message; groups: "url" (full URL) and "id" (the numeric revision ID).
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
279
279
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this only continues the inner tag
                        # loop, not the outer node loop, so a commit-message
                        # match below can still overwrite this entry with
                        # force=1 — confirm this is intended.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: the commit message explicitly names the revision.
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        # Extract the hg node recorded in a diff's metadata, or None when
        # the metadata carries no (valid) node.
        getnode = lambda d: bin(
            getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            # All diffs the server knows for this Differential Revision.
            diffs = [d for d in alldiffs.values()
                     if int(d[b'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid locally removes the stale "D123" tag.
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                # The highest diff id is the most recent upload.
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
357
357
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    repo = ctx.repo()
    # Diff of ctx against its first parent; labels are discarded.
    chunks = patch.diffui(repo, ctx.p1().node(), ctx.node(), None,
                          opts=diffopts)
    buf = util.stringio()
    for piece, _unusedlabel in chunks:
        buf.write(piece)
    return buf.getvalue()
365
365
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    # Create a "Differential Diff" via "differential.createrawdiff" API.
    # Use a git-style diff with effectively unlimited context so the patch
    # round-trips losslessly.
    rawdiff = getdiff(ctx, mdiff.diffopts(git=True, context=32767))
    params = {b'diff': rawdiff}
    repophid = getrepophid(repo)
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
378
378
def _setdiffproperty(repo, diff, name, data):
    """attach one named JSON property to a Differential diff"""
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diff[b'id'],
        b'name': name,
        b'data': data,
    })

def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Two properties are set: ``hg:meta`` (flat commit metadata) and
    ``local:commits`` (per-node metadata keyed by hex node), each via its
    own ``differential.setdiffproperty`` call.
    """
    repo = ctx.repo()
    _setdiffproperty(repo, diff, b'hg:meta', templatefilters.json({
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }))
    _setdiffproperty(repo, diff, b'local:commits', templatefilters.json({
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }))
409
409
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of Conduit result objects.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs to decide whether a new upload is needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%d' % parentrevid
        transactions += [{b'type': b'summary', b'value': summary},
                         {b'type': b'summary', b'value': b' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for k, v in info[b'fields'].items():
        # Only these fields are forwarded as edit transactions.
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
472
472
def userphids(repo, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(repo, b'user.search',
                         {b'constraints': {b'usernames': wanted}})
    data = result[b'data']
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(missing)))
    return [entry[b'phid'] for entry in data]
487
487
@vcrcommand(b'phabsend',
            [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
             (b'', b'amend', True, _(b'update commit messages')),
             (b'', b'reviewer', [], _(b'specify reviewers')),
             (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
            _(b'REV [OPTIONS]'),
            helpcategory=command.CATEGORY_IMPORT_EXPORT)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    # Revisions may come in positionally or via -r/--rev; merge both.
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        # Amending rewrites changesets, so refuse to start while some other
        # unfinished multi-step operation is in progress.
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Interactive confirmation is enabled by either the --confirm flag or the
    # phabsend.confirm config knob.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    if reviewers:
        # Resolve reviewer names once; the same "reviewers.add" action is
        # attached to every revision in the stack.
        phids = userphids(repo, reviewers)
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {b'created': _(b'created'),
             b'skipped': _(b'skipped'),
             b'updated': _(b'updated')}[action],
            b'phabricator.action.%s' % action)
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                             desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(_("warning: not updating public commit %s\n")
                                % scmutil.formatchangeid(old))
                        continue
                    # Look parents up through "mapping" so a parent that was
                    # already rewritten in this loop is used instead of its
                    # obsolete predecessor.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                # Keep the working directory parent on the rewritten node.
                unfi.setparents(mapping[wnode][0])
635
635
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' looks intentional so the
# rendered line matches "hg export"'s double-space "# Parent  <node>" header
# spacing when formatted by readpatch — confirm before "fixing" it.
_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
                              (b'node', b'Node ID'), (b'parent', b'Parent ')])
640
640
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets about to be sent and prompt for confirmation

    Returns True when the user accepts, False when the user declines.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Show the Differential Revision this changeset would update, or NEW
        # when no association is known yet.
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?'
               b'$$ &Yes $$ &No') % url
    # promptchoice returns 0 for "Yes", so a truthy result means "No".
    return not ui.promptchoice(prompt)
663
663
# Status names a DREVSPEC symbol may match; compared against the normalized
# (lowercased, space-stripped) form produced by _getstatusname.
_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
                     b'abandoned'}
666
666
667 def _getstatusname(drev):
667 def _getstatusname(drev):
668 """get normalized status name from a Differential Revision"""
668 """get normalized status name from a Differential Revision"""
669 return drev[b'statusName'].replace(b' ', b'').lower()
669 return drev[b'statusName'].replace(b' ', b'').lower()
670
670
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    # ":X" selects the stack of X, i.e. X and its dependencies
    b':': (8, None, (b'ancestors', 8), None, None),
    # set operations: intersection, union, difference
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    # a bare symbol: D123, 123, or a status name like "accepted"
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
685
685
def _tokenize(text):
    """tokenize a DREVSPEC bytestring

    Yields (token-type, value, position) tuples: (b'symbol', <bytes>, pos)
    for runs of non-special characters, (<special-char>, None, pos) for the
    single-character operators, and a final (b'end', None, pos).
    """
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
                                              pycompat.iterbytestr(view[pos:])))
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            # Slice instead of indexing: on Python 3, text[pos] on a bytes
            # object is an int, which would both break the b' ' comparison
            # (always unequal) and yield an int token type. text[pos:pos + 1]
            # is a 1-byte bytes object on both Python 2 and 3.
            if text[pos:pos + 1] != b' ':
                yield (text[pos:pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
702
702
def _parse(text):
    """parse a DREVSPEC bytestring into a prefix tree

    Aborts with ParseError if the whole input is not consumed.
    """
    drevparser = parser.parser(_elements)
    tree, consumed = drevparser.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
708
708
709 def _parsedrev(symbol):
709 def _parsedrev(symbol):
710 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
710 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
711 if symbol.startswith(b'D') and symbol[1:].isdigit():
711 if symbol.startswith(b'D') and symbol[1:].isdigit():
712 return int(symbol[1:])
712 return int(symbol[1:])
713 if symbol.isdigit():
713 if symbol.isdigit():
714 return int(symbol)
714 return int(symbol)
715
715
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    plain = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            plain.add(drev)
    elif kind == b'ancestors':
        # ":X" — X itself is also an ancestor root, so it lands in both sets.
        subplain, subancestors = _prefetchdrevs(tree[1])
        plain |= subplain
        ancestors |= subplain | subancestors
    else:
        # binary operators / group: union the results of all operands
        for subtree in tree[1:]:
            subplain, subancestors = _prefetchdrevs(subtree)
            plain |= subplain
            ancestors |= subancestors
    return plain, ancestors
736
736
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """
    def fetch(params):
        """params -> single drev or None"""
        # Serve from the shared "prefetched" cache when possible; a cache
        # key is either an integer drev id or a PHID.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_(b'cannot get Differential Revision %r')
                              % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Walk "phabricator:depends-on" links depth-first, collecting ids
        # top-down, then reverse so the result is bottom-up.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    # For each ancestor root r, speculatively fetch the "batchsize" ids just
    # below it, since stacks tend to be numbered consecutively.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name selects every prefetched drev with that
                # (normalized) status.
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # NOTE(review): getattr with a bytes attribute name raises
            # TypeError on Python 3 — confirm this path is exercised under
            # py3 and whether op needs a sysstr conversion.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
854
854
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Empty sections are dropped so there are no stray blank paragraphs.
    return b'\n\n'.join(section for section in sections if section)
868
868
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Any fields still missing afterwards fall back to the top-level diff
    fields "dateCreated", "branch" and "sourceControlBaseRevision".

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commits = props[b'local:commits']
            # Pick a deterministic entry by sorting the node-hash keys.
            # Sorting the value dicts themselves (the previous behaviour)
            # raises TypeError on Python 3, where dicts are unorderable.
            # NOTE(review): with several commits in one diff the choice of
            # "first" commit is arbitrary either way — confirm whether arc
            # ever sends more than one.
            commit = commits[sorted(commits)[0]]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
                                              commit[b'authorEmail'])
            if b'time' in commit:
                # local:commits carries a bare epoch, so the offset is lost
                meta[b'date'] = b'%d 0' % commit[b'time']
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
        # Fall back to the top-level diff fields for anything still missing.
        if b'date' not in meta and b'dateCreated' in diff:
            meta[b'date'] = b'%s 0' % diff[b'dateCreated']
        if b'branch' not in meta and diff.get(b'branch'):
            meta[b'branch'] = diff[b'branch']
        if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
            meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
924
933
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        # Only the latest diff of each Differential Revision is exported.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(repo, b'differential.getrawdiff',
                           {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Emit hg patch headers that can be read by the "import" command.
        # See patchheadermap and extract in mercurial/patch.py for the
        # supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for name, label in _metanamemap.items():
            if name in meta:
                headerlines.append(b'# %s %s\n' % (label, meta[name]))
        header = b''.join(headerlines)

        write(b'%s%s\n%s' % (header, desc, body))
955
964
956 @vcrcommand(b'phabread',
965 @vcrcommand(b'phabread',
957 [(b'', b'stack', False, _(b'read dependencies'))],
966 [(b'', b'stack', False, _(b'read dependencies'))],
958 _(b'DREVSPEC [OPTIONS]'),
967 _(b'DREVSPEC [OPTIONS]'),
959 helpcategory=command.CATEGORY_IMPORT_EXPORT)
968 helpcategory=command.CATEGORY_IMPORT_EXPORT)
960 def phabread(ui, repo, spec, **opts):
969 def phabread(ui, repo, spec, **opts):
961 """print patches from Phabricator suitable for importing
970 """print patches from Phabricator suitable for importing
962
971
963 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
972 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
964 the number ``123``. It could also have common operators like ``+``, ``-``,
973 the number ``123``. It could also have common operators like ``+``, ``-``,
965 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
974 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
966 select a stack.
975 select a stack.
967
976
968 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
977 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
969 could be used to filter patches by status. For performance reason, they
978 could be used to filter patches by status. For performance reason, they
970 only represent a subset of non-status selections and cannot be used alone.
979 only represent a subset of non-status selections and cannot be used alone.
971
980
972 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
981 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
973 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
982 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
974 stack up to D9.
983 stack up to D9.
975
984
976 If --stack is given, follow dependencies information and read all patches.
985 If --stack is given, follow dependencies information and read all patches.
977 It is equivalent to the ``:`` operator.
986 It is equivalent to the ``:`` operator.
978 """
987 """
979 opts = pycompat.byteskwargs(opts)
988 opts = pycompat.byteskwargs(opts)
980 if opts.get(b'stack'):
989 if opts.get(b'stack'):
981 spec = b':(%s)' % spec
990 spec = b':(%s)' % spec
982 drevs = querydrev(repo, spec)
991 drevs = querydrev(repo, spec)
983 readpatch(repo, drevs, ui.write)
992 readpatch(repo, drevs, ui.write)
984
993
985 @vcrcommand(b'phabupdate',
994 @vcrcommand(b'phabupdate',
986 [(b'', b'accept', False, _(b'accept revisions')),
995 [(b'', b'accept', False, _(b'accept revisions')),
987 (b'', b'reject', False, _(b'reject revisions')),
996 (b'', b'reject', False, _(b'reject revisions')),
988 (b'', b'abandon', False, _(b'abandon revisions')),
997 (b'', b'abandon', False, _(b'abandon revisions')),
989 (b'', b'reclaim', False, _(b'reclaim revisions')),
998 (b'', b'reclaim', False, _(b'reclaim revisions')),
990 (b'm', b'comment', b'', _(b'comment on the last revision')),
999 (b'm', b'comment', b'', _(b'comment on the last revision')),
991 ], _(b'DREVSPEC [OPTIONS]'),
1000 ], _(b'DREVSPEC [OPTIONS]'),
992 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1001 helpcategory=command.CATEGORY_IMPORT_EXPORT)
993 def phabupdate(ui, repo, spec, **opts):
1002 def phabupdate(ui, repo, spec, **opts):
994 """update Differential Revision in batch
1003 """update Differential Revision in batch
995
1004
996 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1005 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
997 """
1006 """
998 opts = pycompat.byteskwargs(opts)
1007 opts = pycompat.byteskwargs(opts)
999 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1008 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1000 if len(flags) > 1:
1009 if len(flags) > 1:
1001 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1010 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1002
1011
1003 actions = []
1012 actions = []
1004 for f in flags:
1013 for f in flags:
1005 actions.append({b'type': f, b'value': b'true'})
1014 actions.append({b'type': f, b'value': b'true'})
1006
1015
1007 drevs = querydrev(repo, spec)
1016 drevs = querydrev(repo, spec)
1008 for i, drev in enumerate(drevs):
1017 for i, drev in enumerate(drevs):
1009 if i + 1 == len(drevs) and opts.get(b'comment'):
1018 if i + 1 == len(drevs) and opts.get(b'comment'):
1010 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1019 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1011 if actions:
1020 if actions:
1012 params = {b'objectIdentifier': drev[b'phid'],
1021 params = {b'objectIdentifier': drev[b'phid'],
1013 b'transactions': actions}
1022 b'transactions': actions}
1014 callconduit(repo, b'differential.revision.edit', params)
1023 callconduit(repo, b'differential.revision.edit', params)
1015
1024
1016 templatekeyword = registrar.templatekeyword()
1025 templatekeyword = registrar.templatekeyword()
1017
1026
1018 @templatekeyword(b'phabreview', requires={b'ctx'})
1027 @templatekeyword(b'phabreview', requires={b'ctx'})
1019 def template_review(context, mapping):
1028 def template_review(context, mapping):
1020 """:phabreview: Object describing the review for this changeset.
1029 """:phabreview: Object describing the review for this changeset.
1021 Has attributes `url` and `id`.
1030 Has attributes `url` and `id`.
1022 """
1031 """
1023 ctx = context.resource(mapping, b'ctx')
1032 ctx = context.resource(mapping, b'ctx')
1024 m = _differentialrevisiondescre.search(ctx.description())
1033 m = _differentialrevisiondescre.search(ctx.description())
1025 if m:
1034 if m:
1026 return templateutil.hybriddict({
1035 return templateutil.hybriddict({
1027 b'url': m.group(r'url'),
1036 b'url': m.group(r'url'),
1028 b'id': b"D%s" % m.group(r'id'),
1037 b'id': b"D%s" % m.group(r'id'),
1029 })
1038 })
1030 else:
1039 else:
1031 tags = ctx.repo().nodetags(ctx.node())
1040 tags = ctx.repo().nodetags(ctx.node())
1032 for t in tags:
1041 for t in tags:
1033 if _differentialrevisiontagre.match(t):
1042 if _differentialrevisiontagre.match(t):
1034 url = ctx.repo().ui.config(b'phabricator', b'url')
1043 url = ctx.repo().ui.config(b'phabricator', b'url')
1035 if not url.endswith(b'/'):
1044 if not url.endswith(b'/'):
1036 url += b'/'
1045 url += b'/'
1037 url += t
1046 url += t
1038
1047
1039 return templateutil.hybriddict({
1048 return templateutil.hybriddict({
1040 b'url': url,
1049 b'url': url,
1041 b'id': t,
1050 b'id': t,
1042 })
1051 })
1043 return None
1052 return None
@@ -1,121 +1,121 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 > EOF
5 > EOF
6 $ hg init repo
6 $ hg init repo
7 $ cd repo
7 $ cd repo
8 $ cat >> .hg/hgrc <<EOF
8 $ cat >> .hg/hgrc <<EOF
9 > [phabricator]
9 > [phabricator]
10 > url = https://phab.mercurial-scm.org/
10 > url = https://phab.mercurial-scm.org/
11 > callsign = HG
11 > callsign = HG
12 >
12 >
13 > [auth]
13 > [auth]
14 > hgphab.schemes = https
14 > hgphab.schemes = https
15 > hgphab.prefix = phab.mercurial-scm.org
15 > hgphab.prefix = phab.mercurial-scm.org
16 > # When working on the extension and making phabricator interaction
16 > # When working on the extension and making phabricator interaction
17 > # changes, edit this to be a real phabricator token. When done, edit
17 > # changes, edit this to be a real phabricator token. When done, edit
18 > # it back, and make sure to also edit your VCR transcripts to match
18 > # it back, and make sure to also edit your VCR transcripts to match
19 > # whatever value you put here.
19 > # whatever value you put here.
20 > hgphab.phabtoken = cli-hahayouwish
20 > hgphab.phabtoken = cli-hahayouwish
21 > EOF
21 > EOF
22 $ VCR="$TESTDIR/phabricator"
22 $ VCR="$TESTDIR/phabricator"
23
23
24 Error is handled reasonably. We override the phabtoken here so that
24 Error is handled reasonably. We override the phabtoken here so that
25 when you're developing changes to phabricator.py you can edit the
25 when you're developing changes to phabricator.py you can edit the
26 above config and have a real token in the test but not have to edit
26 above config and have a real token in the test but not have to edit
27 this test.
27 this test.
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31
31
32 Basic phabread:
32 Basic phabread:
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
33 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
34 # HG changeset patch
34 # HG changeset patch
35 # Date 1536771503 0
36 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
35 exchangev2: start to implement pull with wire protocol v2
37 exchangev2: start to implement pull with wire protocol v2
36
38
37 Wire protocol version 2 will take a substantially different
39 Wire protocol version 2 will take a substantially different
38 approach to exchange than version 1 (at least as far as pulling
40 approach to exchange than version 1 (at least as far as pulling
39 is concerned).
41 is concerned).
40
42
41 This commit establishes a new exchangev2 module for holding
43 This commit establishes a new exchangev2 module for holding
42 code related to exchange using wire protocol v2. I could have
43 added things to the existing exchange module. But it is already
44
44
45 phabupdate with an accept:
45 phabupdate with an accept:
46 $ hg phabupdate --accept D4564 \
46 $ hg phabupdate --accept D4564 \
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
47 > -m 'I think I like where this is headed. Will read rest of series later.'\
48 > --test-vcr "$VCR/accept-4564.json"
48 > --test-vcr "$VCR/accept-4564.json"
49
49
50 Create a differential diff:
50 Create a differential diff:
51 $ HGENCODING=utf-8; export HGENCODING
51 $ HGENCODING=utf-8; export HGENCODING
52 $ echo alpha > alpha
52 $ echo alpha > alpha
53 $ hg ci --addremove -m 'create alpha for phabricator test €'
53 $ hg ci --addremove -m 'create alpha for phabricator test €'
54 adding alpha
54 adding alpha
55 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
55 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
56 D6054 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
56 D6054 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
57 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
57 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
58 $ echo more >> alpha
58 $ echo more >> alpha
59 $ HGEDITOR=true hg ci --amend
59 $ HGEDITOR=true hg ci --amend
60 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/cb03845d6dd9-870f61a6-amend.hg
60 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/cb03845d6dd9-870f61a6-amend.hg
61 $ echo beta > beta
61 $ echo beta > beta
62 $ hg ci --addremove -m 'create beta for phabricator test'
62 $ hg ci --addremove -m 'create beta for phabricator test'
63 adding beta
63 adding beta
64 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
64 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
65 D6054 - updated - 939d862f0318: create alpha for phabricator test \xe2\x82\xac (esc)
65 D6054 - updated - 939d862f0318: create alpha for phabricator test \xe2\x82\xac (esc)
66 D6055 - created - f55f947ed0f8: create beta for phabricator test
66 D6055 - created - f55f947ed0f8: create beta for phabricator test
67 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f55f947ed0f8-0d1e502e-phabsend.hg
67 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f55f947ed0f8-0d1e502e-phabsend.hg
68 $ unset HGENCODING
68 $ unset HGENCODING
69
69
70 The amend won't explode after posting a public commit. The local tag is left
70 The amend won't explode after posting a public commit. The local tag is left
71 behind to identify it.
71 behind to identify it.
72
72
73 $ echo 'public change' > beta
73 $ echo 'public change' > beta
74 $ hg ci -m 'create public change for phabricator testing'
74 $ hg ci -m 'create public change for phabricator testing'
75 $ hg phase --public .
75 $ hg phase --public .
76 $ echo 'draft change' > alpha
76 $ echo 'draft change' > alpha
77 $ hg ci -m 'create draft change for phabricator testing'
77 $ hg ci -m 'create draft change for phabricator testing'
78 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
78 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
79 D5544 - created - a56e5ebd77e6: create public change for phabricator testing
79 D5544 - created - a56e5ebd77e6: create public change for phabricator testing
80 D5545 - created - 6a0ade3e3ec2: create draft change for phabricator testing
80 D5545 - created - 6a0ade3e3ec2: create draft change for phabricator testing
81 warning: not updating public commit 2:a56e5ebd77e6
81 warning: not updating public commit 2:a56e5ebd77e6
82 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6a0ade3e3ec2-aca7d23c-phabsend.hg
82 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6a0ade3e3ec2-aca7d23c-phabsend.hg
83 $ hg tags -v
83 $ hg tags -v
84 tip 3:90532860b5e1
84 tip 3:90532860b5e1
85 D5544 2:a56e5ebd77e6 local
85 D5544 2:a56e5ebd77e6 local
86
86
87 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
87 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
88 > {
88 > {
89 > "constraints": {
89 > "constraints": {
90 > "isBot": true
90 > "isBot": true
91 > }
91 > }
92 > }
92 > }
93 > EOF
93 > EOF
94 {
94 {
95 "cursor": {
95 "cursor": {
96 "after": null,
96 "after": null,
97 "before": null,
97 "before": null,
98 "limit": 100,
98 "limit": 100,
99 "order": null
99 "order": null
100 },
100 },
101 "data": [],
101 "data": [],
102 "maps": {},
102 "maps": {},
103 "query": {
103 "query": {
104 "queryKey": null
104 "queryKey": null
105 }
105 }
106 }
106 }
107
107
108 Template keywords
108 Template keywords
109 $ hg log -T'{rev} {phabreview|json}\n'
109 $ hg log -T'{rev} {phabreview|json}\n'
110 3 {"id": "D5545", "url": "https://phab.mercurial-scm.org/D5545"}
110 3 {"id": "D5545", "url": "https://phab.mercurial-scm.org/D5545"}
111 2 {"id": "D5544", "url": "https://phab.mercurial-scm.org/D5544"}
111 2 {"id": "D5544", "url": "https://phab.mercurial-scm.org/D5544"}
112 1 {"id": "D6055", "url": "https://phab.mercurial-scm.org/D6055"}
112 1 {"id": "D6055", "url": "https://phab.mercurial-scm.org/D6055"}
113 0 {"id": "D6054", "url": "https://phab.mercurial-scm.org/D6054"}
113 0 {"id": "D6054", "url": "https://phab.mercurial-scm.org/D6054"}
114
114
115 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
115 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
116 3 https://phab.mercurial-scm.org/D5545 D5545
116 3 https://phab.mercurial-scm.org/D5545 D5545
117 2 https://phab.mercurial-scm.org/D5544 D5544
117 2 https://phab.mercurial-scm.org/D5544 D5544
118 1 https://phab.mercurial-scm.org/D6055 D6055
118 1 https://phab.mercurial-scm.org/D6055 D6055
119 0 https://phab.mercurial-scm.org/D6054 D6054
119 0 https://phab.mercurial-scm.org/D6054 D6054
120
120
121 $ cd ..
121 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now