phabricator: add the maketext function...
Ian Moody
r43456:f742faba default
@@ -1,1359 +1,1390 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan`` which might prevent some
14 By default, Phabricator requires a ``Test Plan`` which might prevent some
15 changesets from being sent. The requirement could be disabled by changing
15 changesets from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial.pycompat import getattr
52 from mercurial.pycompat import getattr
53 from mercurial.thirdparty import attr
53 from mercurial.thirdparty import attr
54 from mercurial import (
54 from mercurial import (
55 cmdutil,
55 cmdutil,
56 context,
56 context,
57 encoding,
57 encoding,
58 error,
58 error,
59 exthelper,
59 exthelper,
60 httpconnection as httpconnectionmod,
60 httpconnection as httpconnectionmod,
61 match,
61 mdiff,
62 mdiff,
62 obsutil,
63 obsutil,
63 parser,
64 parser,
64 patch,
65 patch,
65 phases,
66 phases,
66 pycompat,
67 pycompat,
67 scmutil,
68 scmutil,
68 smartset,
69 smartset,
69 tags,
70 tags,
70 templatefilters,
71 templatefilters,
71 templateutil,
72 templateutil,
72 url as urlmod,
73 url as urlmod,
73 util,
74 util,
74 )
75 )
75 from mercurial.utils import (
76 from mercurial.utils import (
76 procutil,
77 procutil,
77 stringutil,
78 stringutil,
78 )
79 )
79
80
80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
81 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 # be specifying the version(s) of Mercurial they are tested with, or
83 # be specifying the version(s) of Mercurial they are tested with, or
83 # leave the attribute unspecified.
84 # leave the attribute unspecified.
84 testedwith = b'ships-with-hg-core'
85 testedwith = b'ships-with-hg-core'
85
86
86 eh = exthelper.exthelper()
87 eh = exthelper.exthelper()
87
88
88 cmdtable = eh.cmdtable
89 cmdtable = eh.cmdtable
89 command = eh.command
90 command = eh.command
90 configtable = eh.configtable
91 configtable = eh.configtable
91 templatekeyword = eh.templatekeyword
92 templatekeyword = eh.templatekeyword
92
93
93 # developer config: phabricator.batchsize
94 # developer config: phabricator.batchsize
94 eh.configitem(
95 eh.configitem(
95 b'phabricator', b'batchsize', default=12,
96 b'phabricator', b'batchsize', default=12,
96 )
97 )
97 eh.configitem(
98 eh.configitem(
98 b'phabricator', b'callsign', default=None,
99 b'phabricator', b'callsign', default=None,
99 )
100 )
100 eh.configitem(
101 eh.configitem(
101 b'phabricator', b'curlcmd', default=None,
102 b'phabricator', b'curlcmd', default=None,
102 )
103 )
103 # developer config: phabricator.repophid
104 # developer config: phabricator.repophid
104 eh.configitem(
105 eh.configitem(
105 b'phabricator', b'repophid', default=None,
106 b'phabricator', b'repophid', default=None,
106 )
107 )
107 eh.configitem(
108 eh.configitem(
108 b'phabricator', b'url', default=None,
109 b'phabricator', b'url', default=None,
109 )
110 )
110 eh.configitem(
111 eh.configitem(
111 b'phabsend', b'confirm', default=False,
112 b'phabsend', b'confirm', default=False,
112 )
113 )
113
114
114 colortable = {
115 colortable = {
115 b'phabricator.action.created': b'green',
116 b'phabricator.action.created': b'green',
116 b'phabricator.action.skipped': b'magenta',
117 b'phabricator.action.skipped': b'magenta',
117 b'phabricator.action.updated': b'magenta',
118 b'phabricator.action.updated': b'magenta',
118 b'phabricator.desc': b'',
119 b'phabricator.desc': b'',
119 b'phabricator.drev': b'bold',
120 b'phabricator.drev': b'bold',
120 b'phabricator.node': b'',
121 b'phabricator.node': b'',
121 }
122 }
122
123
123 _VCR_FLAGS = [
124 _VCR_FLAGS = [
124 (
125 (
125 b'',
126 b'',
126 b'test-vcr',
127 b'test-vcr',
127 b'',
128 b'',
128 _(
129 _(
129 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
130 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
130 b', otherwise will mock all http requests using the specified vcr file.'
131 b', otherwise will mock all http requests using the specified vcr file.'
131 b' (ADVANCED)'
132 b' (ADVANCED)'
132 ),
133 ),
133 ),
134 ),
134 ]
135 ]
135
136
136
137
137 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
138 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
138 fullflags = flags + _VCR_FLAGS
139 fullflags = flags + _VCR_FLAGS
139
140
140 def hgmatcher(r1, r2):
141 def hgmatcher(r1, r2):
141 if r1.uri != r2.uri or r1.method != r2.method:
142 if r1.uri != r2.uri or r1.method != r2.method:
142 return False
143 return False
143 r1params = r1.body.split(b'&')
144 r1params = r1.body.split(b'&')
144 r2params = r2.body.split(b'&')
145 r2params = r2.body.split(b'&')
145 return set(r1params) == set(r2params)
146 return set(r1params) == set(r2params)
146
147
147 def sanitiserequest(request):
148 def sanitiserequest(request):
148 request.body = re.sub(
149 request.body = re.sub(
149 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
150 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
150 )
151 )
151 return request
152 return request
152
153
153 def sanitiseresponse(response):
154 def sanitiseresponse(response):
154 if r'set-cookie' in response[r'headers']:
155 if r'set-cookie' in response[r'headers']:
155 del response[r'headers'][r'set-cookie']
156 del response[r'headers'][r'set-cookie']
156 return response
157 return response
157
158
158 def decorate(fn):
159 def decorate(fn):
159 def inner(*args, **kwargs):
160 def inner(*args, **kwargs):
160 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
161 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
161 if cassette:
162 if cassette:
162 import hgdemandimport
163 import hgdemandimport
163
164
164 with hgdemandimport.deactivated():
165 with hgdemandimport.deactivated():
165 import vcr as vcrmod
166 import vcr as vcrmod
166 import vcr.stubs as stubs
167 import vcr.stubs as stubs
167
168
168 vcr = vcrmod.VCR(
169 vcr = vcrmod.VCR(
169 serializer=r'json',
170 serializer=r'json',
170 before_record_request=sanitiserequest,
171 before_record_request=sanitiserequest,
171 before_record_response=sanitiseresponse,
172 before_record_response=sanitiseresponse,
172 custom_patches=[
173 custom_patches=[
173 (
174 (
174 urlmod,
175 urlmod,
175 r'httpconnection',
176 r'httpconnection',
176 stubs.VCRHTTPConnection,
177 stubs.VCRHTTPConnection,
177 ),
178 ),
178 (
179 (
179 urlmod,
180 urlmod,
180 r'httpsconnection',
181 r'httpsconnection',
181 stubs.VCRHTTPSConnection,
182 stubs.VCRHTTPSConnection,
182 ),
183 ),
183 ],
184 ],
184 )
185 )
185 vcr.register_matcher(r'hgmatcher', hgmatcher)
186 vcr.register_matcher(r'hgmatcher', hgmatcher)
186 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
187 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
187 return fn(*args, **kwargs)
188 return fn(*args, **kwargs)
188 return fn(*args, **kwargs)
189 return fn(*args, **kwargs)
189
190
190 inner.__name__ = fn.__name__
191 inner.__name__ = fn.__name__
191 inner.__doc__ = fn.__doc__
192 inner.__doc__ = fn.__doc__
192 return command(
193 return command(
193 name,
194 name,
194 fullflags,
195 fullflags,
195 spec,
196 spec,
196 helpcategory=helpcategory,
197 helpcategory=helpcategory,
197 optionalrepo=optionalrepo,
198 optionalrepo=optionalrepo,
198 )(inner)
199 )(inner)
199
200
200 return decorate
201 return decorate
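# Editor's note (illustrative, not part of this change): commands wrapped by
# vcrcommand() gain the --test-vcr option, so a test run can record Conduit
# HTTP traffic to a cassette and later replay it, e.g.
#
#   $ echo '{}' | hg debugcallconduit --test-vcr ping.json conduit.ping
#
# The cassette name and the conduit.ping method are placeholders here.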
201
202
202
203
203 def urlencodenested(params):
204 def urlencodenested(params):
204 """like urlencode, but works with nested parameters.
205 """like urlencode, but works with nested parameters.
205
206
206 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
207 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
207 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
208 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
208 urlencode. Note: the encoding is consistent with PHP's http_build_query.
209 urlencode. Note: the encoding is consistent with PHP's http_build_query.
209 """
210 """
210 flatparams = util.sortdict()
211 flatparams = util.sortdict()
211
212
212 def process(prefix, obj):
213 def process(prefix, obj):
213 if isinstance(obj, bool):
214 if isinstance(obj, bool):
214 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
215 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
215 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
216 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
216 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
217 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
217 if items is None:
218 if items is None:
218 flatparams[prefix] = obj
219 flatparams[prefix] = obj
219 else:
220 else:
220 for k, v in items(obj):
221 for k, v in items(obj):
221 if prefix:
222 if prefix:
222 process(b'%s[%s]' % (prefix, k), v)
223 process(b'%s[%s]' % (prefix, k), v)
223 else:
224 else:
224 process(k, v)
225 process(k, v)
225
226
226 process(b'', params)
227 process(b'', params)
227 return util.urlreq.urlencode(flatparams)
228 return util.urlreq.urlencode(flatparams)
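# Editor's doctest-style sketch of the flattening described above (not part of
# this change):
#
#   >>> urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
#   'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'
#
# i.e. a[0]=b, a[1]=c and d[e]=f after percent-encoding of the brackets,
# matching PHP's http_build_query.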
228
229
229
230
230 def readurltoken(ui):
231 def readurltoken(ui):
231 """return conduit url, token and make sure they exist
232 """return conduit url, token and make sure they exist
232
233
233 Currently read from [auth] config section. In the future, it might
234 Currently read from [auth] config section. In the future, it might
234 make sense to read from .arcconfig and .arcrc as well.
235 make sense to read from .arcconfig and .arcrc as well.
235 """
236 """
236 url = ui.config(b'phabricator', b'url')
237 url = ui.config(b'phabricator', b'url')
237 if not url:
238 if not url:
238 raise error.Abort(
239 raise error.Abort(
239 _(b'config %s.%s is required') % (b'phabricator', b'url')
240 _(b'config %s.%s is required') % (b'phabricator', b'url')
240 )
241 )
241
242
242 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
243 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
243 token = None
244 token = None
244
245
245 if res:
246 if res:
246 group, auth = res
247 group, auth = res
247
248
248 ui.debug(b"using auth.%s.* for authentication\n" % group)
249 ui.debug(b"using auth.%s.* for authentication\n" % group)
249
250
250 token = auth.get(b'phabtoken')
251 token = auth.get(b'phabtoken')
251
252
252 if not token:
253 if not token:
253 raise error.Abort(
254 raise error.Abort(
254 _(b'Can\'t find conduit token associated to %s') % (url,)
255 _(b'Can\'t find conduit token associated to %s') % (url,)
255 )
256 )
256
257
257 return url, token
258 return url, token
258
259
259
260
260 def callconduit(ui, name, params):
261 def callconduit(ui, name, params):
261 """call Conduit API, params is a dict. return json.loads result, or None"""
262 """call Conduit API, params is a dict. return json.loads result, or None"""
262 host, token = readurltoken(ui)
263 host, token = readurltoken(ui)
263 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
264 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
264 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
265 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
265 params = params.copy()
266 params = params.copy()
266 params[b'api.token'] = token
267 params[b'api.token'] = token
267 data = urlencodenested(params)
268 data = urlencodenested(params)
268 curlcmd = ui.config(b'phabricator', b'curlcmd')
269 curlcmd = ui.config(b'phabricator', b'curlcmd')
269 if curlcmd:
270 if curlcmd:
270 sin, sout = procutil.popen2(
271 sin, sout = procutil.popen2(
271 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
272 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
272 )
273 )
273 sin.write(data)
274 sin.write(data)
274 sin.close()
275 sin.close()
275 body = sout.read()
276 body = sout.read()
276 else:
277 else:
277 urlopener = urlmod.opener(ui, authinfo)
278 urlopener = urlmod.opener(ui, authinfo)
278 request = util.urlreq.request(pycompat.strurl(url), data=data)
279 request = util.urlreq.request(pycompat.strurl(url), data=data)
279 with contextlib.closing(urlopener.open(request)) as rsp:
280 with contextlib.closing(urlopener.open(request)) as rsp:
280 body = rsp.read()
281 body = rsp.read()
281 ui.debug(b'Conduit Response: %s\n' % body)
282 ui.debug(b'Conduit Response: %s\n' % body)
282 parsed = pycompat.rapply(
283 parsed = pycompat.rapply(
283 lambda x: encoding.unitolocal(x)
284 lambda x: encoding.unitolocal(x)
284 if isinstance(x, pycompat.unicode)
285 if isinstance(x, pycompat.unicode)
285 else x,
286 else x,
286 # json.loads only accepts bytes from py3.6+
287 # json.loads only accepts bytes from py3.6+
287 json.loads(encoding.unifromlocal(body)),
288 json.loads(encoding.unifromlocal(body)),
288 )
289 )
289 if parsed.get(b'error_code'):
290 if parsed.get(b'error_code'):
290 msg = _(b'Conduit Error (%s): %s') % (
291 msg = _(b'Conduit Error (%s): %s') % (
291 parsed[b'error_code'],
292 parsed[b'error_code'],
292 parsed[b'error_info'],
293 parsed[b'error_info'],
293 )
294 )
294 raise error.Abort(msg)
295 raise error.Abort(msg)
295 return parsed[b'result']
296 return parsed[b'result']
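# Editor's sketch (illustrative, not part of this change; "FOO" is the
# placeholder callsign from the module docstring):
#
#   data = callconduit(
#       repo.ui,
#       b'diffusion.repository.search',
#       {b'constraints': {b'callsigns': [b'FOO']}},
#   )
#   # data[b'data'][0][b'phid'] then holds the repository PHID, which is how
#   # getrepophid() below uses this call.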
296
297
297
298
298 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
299 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
299 def debugcallconduit(ui, repo, name):
300 def debugcallconduit(ui, repo, name):
300 """call Conduit API
301 """call Conduit API
301
302
302 Call parameters are read from stdin as a JSON blob. Result will be written
303 Call parameters are read from stdin as a JSON blob. Result will be written
303 to stdout as a JSON blob.
304 to stdout as a JSON blob.
304 """
305 """
305 # json.loads only accepts bytes from 3.6+
306 # json.loads only accepts bytes from 3.6+
306 rawparams = encoding.unifromlocal(ui.fin.read())
307 rawparams = encoding.unifromlocal(ui.fin.read())
307 # json.loads only returns unicode strings
308 # json.loads only returns unicode strings
308 params = pycompat.rapply(
309 params = pycompat.rapply(
309 lambda x: encoding.unitolocal(x)
310 lambda x: encoding.unitolocal(x)
310 if isinstance(x, pycompat.unicode)
311 if isinstance(x, pycompat.unicode)
311 else x,
312 else x,
312 json.loads(rawparams),
313 json.loads(rawparams),
313 )
314 )
314 # json.dumps only accepts unicode strings
315 # json.dumps only accepts unicode strings
315 result = pycompat.rapply(
316 result = pycompat.rapply(
316 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
317 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
317 callconduit(ui, name, params),
318 callconduit(ui, name, params),
318 )
319 )
319 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
320 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
320 ui.write(b'%s\n' % encoding.unitolocal(s))
321 ui.write(b'%s\n' % encoding.unitolocal(s))
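# Editor's example (illustrative, not part of this change): parameters are a
# JSON blob on stdin and the result is pretty-printed JSON on stdout, e.g.
#
#   $ echo '{"constraints": {"callsigns": ["FOO"]}}' \
#       | hg debugcallconduit diffusion.repository.search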
321
322
322
323
323 def getrepophid(repo):
324 def getrepophid(repo):
324 """given callsign, return repository PHID or None"""
325 """given callsign, return repository PHID or None"""
325 # developer config: phabricator.repophid
326 # developer config: phabricator.repophid
326 repophid = repo.ui.config(b'phabricator', b'repophid')
327 repophid = repo.ui.config(b'phabricator', b'repophid')
327 if repophid:
328 if repophid:
328 return repophid
329 return repophid
329 callsign = repo.ui.config(b'phabricator', b'callsign')
330 callsign = repo.ui.config(b'phabricator', b'callsign')
330 if not callsign:
331 if not callsign:
331 return None
332 return None
332 query = callconduit(
333 query = callconduit(
333 repo.ui,
334 repo.ui,
334 b'diffusion.repository.search',
335 b'diffusion.repository.search',
335 {b'constraints': {b'callsigns': [callsign]}},
336 {b'constraints': {b'callsigns': [callsign]}},
336 )
337 )
337 if len(query[b'data']) == 0:
338 if len(query[b'data']) == 0:
338 return None
339 return None
339 repophid = query[b'data'][0][b'phid']
340 repophid = query[b'data'][0][b'phid']
340 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
341 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
341 return repophid
342 return repophid
342
343
343
344
344 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
345 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
345 _differentialrevisiondescre = re.compile(
346 _differentialrevisiondescre = re.compile(
346 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
347 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
347 )
348 )
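# Editor's doctest-style sketch of what the two patterns match (not part of
# this change; phab.example.com is the placeholder host from the docstring):
#
#   >>> _differentialrevisiontagre.match(b'D1234').group(1)
#   b'1234'
#   >>> m = _differentialrevisiondescre.search(
#   ...     b'Differential Revision: https://phab.example.com/D1234')
#   >>> m.group(r'url'), m.group(r'id')
#   (b'https://phab.example.com/D1234', b'1234')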
348
349
349
350
350 def getoldnodedrevmap(repo, nodelist):
351 def getoldnodedrevmap(repo, nodelist):
351 """find previous nodes that has been sent to Phabricator
352 """find previous nodes that has been sent to Phabricator
352
353
353 return {node: (oldnode, Differential diff, Differential Revision ID)}
354 return {node: (oldnode, Differential diff, Differential Revision ID)}
354 for node in nodelist with known previous sent versions, or associated
355 for node in nodelist with known previous sent versions, or associated
355 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
356 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
356 be ``None``.
357 be ``None``.
357
358
358 Examines commit messages like "Differential Revision:" to get the
359 Examines commit messages like "Differential Revision:" to get the
359 association information.
360 association information.
360
361
361 If such a commit message line is not found, examines all precursors and their
362 If such a commit message line is not found, examines all precursors and their
362 tags. Tags with format like "D1234" are considered a match and the node
363 tags. Tags with format like "D1234" are considered a match and the node
363 with that tag, and the number after "D" (ex. 1234) will be returned.
364 with that tag, and the number after "D" (ex. 1234) will be returned.
364
365
365 The ``old node``, if not None, is guaranteed to be the last diff of
366 The ``old node``, if not None, is guaranteed to be the last diff of
366 the corresponding Differential Revision, and to exist in the repo.
367 the corresponding Differential Revision, and to exist in the repo.
367 """
368 """
368 unfi = repo.unfiltered()
369 unfi = repo.unfiltered()
369 nodemap = unfi.changelog.nodemap
370 nodemap = unfi.changelog.nodemap
370
371
371 result = {} # {node: (oldnode?, lastdiff?, drev)}
372 result = {} # {node: (oldnode?, lastdiff?, drev)}
372 toconfirm = {} # {node: (force, {precnode}, drev)}
373 toconfirm = {} # {node: (force, {precnode}, drev)}
373 for node in nodelist:
374 for node in nodelist:
374 ctx = unfi[node]
375 ctx = unfi[node]
375 # For tags like "D123", put them into "toconfirm" to verify later
376 # For tags like "D123", put them into "toconfirm" to verify later
376 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
377 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
377 for n in precnodes:
378 for n in precnodes:
378 if n in nodemap:
379 if n in nodemap:
379 for tag in unfi.nodetags(n):
380 for tag in unfi.nodetags(n):
380 m = _differentialrevisiontagre.match(tag)
381 m = _differentialrevisiontagre.match(tag)
381 if m:
382 if m:
382 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
383 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
383 continue
384 continue
384
385
385 # Check commit message
386 # Check commit message
386 m = _differentialrevisiondescre.search(ctx.description())
387 m = _differentialrevisiondescre.search(ctx.description())
387 if m:
388 if m:
388 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
389 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
389
390
390 # Double check if tags are genuine by collecting all old nodes from
391 # Double check if tags are genuine by collecting all old nodes from
391 # Phabricator, and expect precursors overlap with it.
392 # Phabricator, and expect precursors overlap with it.
392 if toconfirm:
393 if toconfirm:
393 drevs = [drev for force, precs, drev in toconfirm.values()]
394 drevs = [drev for force, precs, drev in toconfirm.values()]
394 alldiffs = callconduit(
395 alldiffs = callconduit(
395 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
396 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
396 )
397 )
397 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
398 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
398 for newnode, (force, precset, drev) in toconfirm.items():
399 for newnode, (force, precset, drev) in toconfirm.items():
399 diffs = [
400 diffs = [
400 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
401 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
401 ]
402 ]
402
403
403 # "precursors" as known by Phabricator
404 # "precursors" as known by Phabricator
404 phprecset = set(getnode(d) for d in diffs)
405 phprecset = set(getnode(d) for d in diffs)
405
406
406 # Ignore if precursors (Phabricator and local repo) do not overlap,
407 # Ignore if precursors (Phabricator and local repo) do not overlap,
407 # and force is not set (when commit message says nothing)
408 # and force is not set (when commit message says nothing)
408 if not force and not bool(phprecset & precset):
409 if not force and not bool(phprecset & precset):
409 tagname = b'D%d' % drev
410 tagname = b'D%d' % drev
410 tags.tag(
411 tags.tag(
411 repo,
412 repo,
412 tagname,
413 tagname,
413 nullid,
414 nullid,
414 message=None,
415 message=None,
415 user=None,
416 user=None,
416 date=None,
417 date=None,
417 local=True,
418 local=True,
418 )
419 )
419 unfi.ui.warn(
420 unfi.ui.warn(
420 _(
421 _(
421 b'D%s: local tag removed - does not match '
422 b'D%s: local tag removed - does not match '
422 b'Differential history\n'
423 b'Differential history\n'
423 )
424 )
424 % drev
425 % drev
425 )
426 )
426 continue
427 continue
427
428
428 # Find the last node using Phabricator metadata, and make sure it
429 # Find the last node using Phabricator metadata, and make sure it
429 # exists in the repo
430 # exists in the repo
430 oldnode = lastdiff = None
431 oldnode = lastdiff = None
431 if diffs:
432 if diffs:
432 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
433 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
433 oldnode = getnode(lastdiff)
434 oldnode = getnode(lastdiff)
434 if oldnode and oldnode not in nodemap:
435 if oldnode and oldnode not in nodemap:
435 oldnode = None
436 oldnode = None
436
437
437 result[newnode] = (oldnode, lastdiff, drev)
438 result[newnode] = (oldnode, lastdiff, drev)
438
439
439 return result
440 return result
440
441
441
442
442 def getdiff(ctx, diffopts):
443 def getdiff(ctx, diffopts):
443 """plain-text diff without header (user, commit message, etc)"""
444 """plain-text diff without header (user, commit message, etc)"""
444 output = util.stringio()
445 output = util.stringio()
445 for chunk, _label in patch.diffui(
446 for chunk, _label in patch.diffui(
446 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
447 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
447 ):
448 ):
448 output.write(chunk)
449 output.write(chunk)
449 return output.getvalue()
450 return output.getvalue()
450
451
451
452
452 class DiffChangeType(object):
453 class DiffChangeType(object):
453 ADD = 1
454 ADD = 1
454 CHANGE = 2
455 CHANGE = 2
455 DELETE = 3
456 DELETE = 3
456 MOVE_AWAY = 4
457 MOVE_AWAY = 4
457 COPY_AWAY = 5
458 COPY_AWAY = 5
458 MOVE_HERE = 6
459 MOVE_HERE = 6
459 COPY_HERE = 7
460 COPY_HERE = 7
460 MULTICOPY = 8
461 MULTICOPY = 8
461
462
462
463
463 class DiffFileType(object):
464 class DiffFileType(object):
464 TEXT = 1
465 TEXT = 1
465 IMAGE = 2
466 IMAGE = 2
466 BINARY = 3
467 BINARY = 3
467
468
468
469
469 @attr.s
470 @attr.s
470 class phabhunk(dict):
471 class phabhunk(dict):
471 """Represents a Differential hunk, which is owned by a Differential change
472 """Represents a Differential hunk, which is owned by a Differential change
472 """
473 """
473
474
474 oldOffset = attr.ib(default=0) # camelcase-required
475 oldOffset = attr.ib(default=0) # camelcase-required
475 oldLength = attr.ib(default=0) # camelcase-required
476 oldLength = attr.ib(default=0) # camelcase-required
476 newOffset = attr.ib(default=0) # camelcase-required
477 newOffset = attr.ib(default=0) # camelcase-required
477 newLength = attr.ib(default=0) # camelcase-required
478 newLength = attr.ib(default=0) # camelcase-required
478 corpus = attr.ib(default='')
479 corpus = attr.ib(default='')
479 # These get added to the phabchange's equivalents
480 # These get added to the phabchange's equivalents
480 addLines = attr.ib(default=0) # camelcase-required
481 addLines = attr.ib(default=0) # camelcase-required
481 delLines = attr.ib(default=0) # camelcase-required
482 delLines = attr.ib(default=0) # camelcase-required
482
483
483
484
484 @attr.s
485 @attr.s
485 class phabchange(object):
486 class phabchange(object):
486 """Represents a Differential change, owns Differential hunks and owned by a
487 """Represents a Differential change, owns Differential hunks and owned by a
487 Differential diff. Each one represents one file in a diff.
488 Differential diff. Each one represents one file in a diff.
488 """
489 """
489
490
490 currentPath = attr.ib(default=None) # camelcase-required
491 currentPath = attr.ib(default=None) # camelcase-required
491 oldPath = attr.ib(default=None) # camelcase-required
492 oldPath = attr.ib(default=None) # camelcase-required
492 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
493 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
493 metadata = attr.ib(default=attr.Factory(dict))
494 metadata = attr.ib(default=attr.Factory(dict))
494 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
495 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
495 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
496 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
496 type = attr.ib(default=DiffChangeType.CHANGE)
497 type = attr.ib(default=DiffChangeType.CHANGE)
497 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
498 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
498 commitHash = attr.ib(default=None) # camelcase-required
499 commitHash = attr.ib(default=None) # camelcase-required
499 addLines = attr.ib(default=0) # camelcase-required
500 addLines = attr.ib(default=0) # camelcase-required
500 delLines = attr.ib(default=0) # camelcase-required
501 delLines = attr.ib(default=0) # camelcase-required
501 hunks = attr.ib(default=attr.Factory(list))
502 hunks = attr.ib(default=attr.Factory(list))
502
503
503 def copynewmetadatatoold(self):
504 def copynewmetadatatoold(self):
504 for key in list(self.metadata.keys()):
505 for key in list(self.metadata.keys()):
505 newkey = key.replace(b'new:', b'old:')
506 newkey = key.replace(b'new:', b'old:')
506 self.metadata[newkey] = self.metadata[key]
507 self.metadata[newkey] = self.metadata[key]
507
508
508 def addoldmode(self, value):
509 def addoldmode(self, value):
509 self.oldProperties[b'unix:filemode'] = value
510 self.oldProperties[b'unix:filemode'] = value
510
511
511 def addnewmode(self, value):
512 def addnewmode(self, value):
512 self.newProperties[b'unix:filemode'] = value
513 self.newProperties[b'unix:filemode'] = value
513
514
514 def addhunk(self, hunk):
515 def addhunk(self, hunk):
515 if not isinstance(hunk, phabhunk):
516 if not isinstance(hunk, phabhunk):
516 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
517 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
517 self.hunks.append(hunk)
518 self.hunks.append(hunk)
518 # It's useful to include these stats since the Phab web UI shows them,
519 # It's useful to include these stats since the Phab web UI shows them,
519 # and uses them to estimate how large a change a Revision is. Also used
520 # and uses them to estimate how large a change a Revision is. Also used
520 # in email subjects for the [+++--] bit.
521 # in email subjects for the [+++--] bit.
521 self.addLines += hunk.addLines
522 self.addLines += hunk.addLines
522 self.delLines += hunk.delLines
523 self.delLines += hunk.delLines
523
524
524
525
525 @attr.s
526 @attr.s
526 class phabdiff(object):
527 class phabdiff(object):
527 """Represents a Differential diff, owns Differential changes. Corresponds
528 """Represents a Differential diff, owns Differential changes. Corresponds
528 to a commit.
529 to a commit.
529 """
530 """
530
531
531 # Doesn't seem to be any reason to send this (output of uname -n)
532 # Doesn't seem to be any reason to send this (output of uname -n)
532 sourceMachine = attr.ib(default=b'') # camelcase-required
533 sourceMachine = attr.ib(default=b'') # camelcase-required
533 sourcePath = attr.ib(default=b'/') # camelcase-required
534 sourcePath = attr.ib(default=b'/') # camelcase-required
534 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
535 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
535 sourceControlPath = attr.ib(default=b'/') # camelcase-required
536 sourceControlPath = attr.ib(default=b'/') # camelcase-required
536 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
537 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
537 branch = attr.ib(default=b'default')
538 branch = attr.ib(default=b'default')
538 bookmark = attr.ib(default=None)
539 bookmark = attr.ib(default=None)
539 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
540 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
540 lintStatus = attr.ib(default=b'none') # camelcase-required
541 lintStatus = attr.ib(default=b'none') # camelcase-required
541 unitStatus = attr.ib(default=b'none') # camelcase-required
542 unitStatus = attr.ib(default=b'none') # camelcase-required
542 changes = attr.ib(default=attr.Factory(dict))
543 changes = attr.ib(default=attr.Factory(dict))
543 repositoryPHID = attr.ib(default=None) # camelcase-required
544 repositoryPHID = attr.ib(default=None) # camelcase-required
544
545
545 def addchange(self, change):
546 def addchange(self, change):
546 if not isinstance(change, phabchange):
547 if not isinstance(change, phabchange):
547 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
548 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
548 self.changes[change.currentPath] = change
549 self.changes[change.currentPath] = change
549
550
550
551
552 def maketext(pchange, ctx, fname):
553 """populate the phabchange for a text file"""
554 repo = ctx.repo()
555 fmatcher = match.exact([fname])
556 diffopts = mdiff.diffopts(git=True, context=32767)
557 _pfctx, _fctx, header, fhunks = next(
558 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
559 )
560
561 for fhunk in fhunks:
562 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
563 corpus = b''.join(lines[1:])
564 shunk = list(header)
565 shunk.extend(lines)
566 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
567 patch.diffstatdata(util.iterlines(shunk))
568 )
569 pchange.addhunk(
570 phabhunk(
571 oldOffset,
572 oldLength,
573 newOffset,
574 newLength,
575 corpus,
576 addLines,
577 delLines,
578 )
579 )
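# Editor's sketch of how the new helper would be used (illustrative; the caller
# is not part of this change and fname is a placeholder):
#
#   pchange = phabchange(currentPath=fname, oldPath=fname)
#   maketext(pchange, ctx, fname)
#
# Each hunk from patch.diffhunks() becomes a phabhunk whose corpus drops the
# leading "@@ ..." line (lines[1:]), and addhunk() folds the per-hunk add/del
# line counts into the owning phabchange.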
580
581
551 def creatediff(ctx):
582 def creatediff(ctx):
552 """create a Differential Diff"""
583 """create a Differential Diff"""
553 repo = ctx.repo()
584 repo = ctx.repo()
554 repophid = getrepophid(repo)
585 repophid = getrepophid(repo)
555 # Create a "Differential Diff" via "differential.createrawdiff" API
586 # Create a "Differential Diff" via "differential.createrawdiff" API
556 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
587 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
557 if repophid:
588 if repophid:
558 params[b'repositoryPHID'] = repophid
589 params[b'repositoryPHID'] = repophid
559 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
590 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
560 if not diff:
591 if not diff:
561 raise error.Abort(_(b'cannot create diff for %s') % ctx)
592 raise error.Abort(_(b'cannot create diff for %s') % ctx)
562 return diff
593 return diff
563
594
564
595
565 def writediffproperties(ctx, diff):
596 def writediffproperties(ctx, diff):
566 """write metadata to diff so patches could be applied losslessly"""
597 """write metadata to diff so patches could be applied losslessly"""
567 params = {
598 params = {
568 b'diff_id': diff[b'id'],
599 b'diff_id': diff[b'id'],
569 b'name': b'hg:meta',
600 b'name': b'hg:meta',
570 b'data': templatefilters.json(
601 b'data': templatefilters.json(
571 {
602 {
572 b'user': ctx.user(),
603 b'user': ctx.user(),
573 b'date': b'%d %d' % ctx.date(),
604 b'date': b'%d %d' % ctx.date(),
574 b'branch': ctx.branch(),
605 b'branch': ctx.branch(),
575 b'node': ctx.hex(),
606 b'node': ctx.hex(),
576 b'parent': ctx.p1().hex(),
607 b'parent': ctx.p1().hex(),
577 }
608 }
578 ),
609 ),
579 }
610 }
580 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
611 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
581
612
582 params = {
613 params = {
583 b'diff_id': diff[b'id'],
614 b'diff_id': diff[b'id'],
584 b'name': b'local:commits',
615 b'name': b'local:commits',
585 b'data': templatefilters.json(
616 b'data': templatefilters.json(
586 {
617 {
587 ctx.hex(): {
618 ctx.hex(): {
588 b'author': stringutil.person(ctx.user()),
619 b'author': stringutil.person(ctx.user()),
589 b'authorEmail': stringutil.email(ctx.user()),
620 b'authorEmail': stringutil.email(ctx.user()),
590 b'time': int(ctx.date()[0]),
621 b'time': int(ctx.date()[0]),
591 b'commit': ctx.hex(),
622 b'commit': ctx.hex(),
592 b'parents': [ctx.p1().hex()],
623 b'parents': [ctx.p1().hex()],
593 b'branch': ctx.branch(),
624 b'branch': ctx.branch(),
594 },
625 },
595 }
626 }
596 ),
627 ),
597 }
628 }
598 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
629 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
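# Editor's note (illustrative values, not part of this change): the "hg:meta"
# property above serializes to JSON along the lines of
#
#   {"branch": "default", "date": "1570000000 0", "node": "<40-hex node>",
#    "parent": "<40-hex parent>", "user": "Jane Doe <jdoe@example.com>"}
#
# which is the metadata that allows a patch to be re-applied losslessly later.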
599
630
600
631
601 def createdifferentialrevision(
632 def createdifferentialrevision(
602 ctx,
633 ctx,
603 revid=None,
634 revid=None,
604 parentrevphid=None,
635 parentrevphid=None,
605 oldnode=None,
636 oldnode=None,
606 olddiff=None,
637 olddiff=None,
607 actions=None,
638 actions=None,
608 comment=None,
639 comment=None,
609 ):
640 ):
610 """create or update a Differential Revision
641 """create or update a Differential Revision
611
642
612 If revid is None, create a new Differential Revision, otherwise update
643 If revid is None, create a new Differential Revision, otherwise update
613 revid. If parentrevphid is not None, set it as a dependency.
644 revid. If parentrevphid is not None, set it as a dependency.
614
645
615 If oldnode is not None, check if the patch content (without commit message
646 If oldnode is not None, check if the patch content (without commit message
616 and metadata) has changed before creating another diff.
647 and metadata) has changed before creating another diff.
617
648
618 If actions is not None, they will be appended to the transaction.
649 If actions is not None, they will be appended to the transaction.
619 """
650 """
620 repo = ctx.repo()
651 repo = ctx.repo()
621 if oldnode:
652 if oldnode:
622 diffopts = mdiff.diffopts(git=True, context=32767)
653 diffopts = mdiff.diffopts(git=True, context=32767)
623 oldctx = repo.unfiltered()[oldnode]
654 oldctx = repo.unfiltered()[oldnode]
624 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
655 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
625 else:
656 else:
626 neednewdiff = True
657 neednewdiff = True
627
658
628 transactions = []
659 transactions = []
629 if neednewdiff:
660 if neednewdiff:
630 diff = creatediff(ctx)
661 diff = creatediff(ctx)
631 transactions.append({b'type': b'update', b'value': diff[b'phid']})
662 transactions.append({b'type': b'update', b'value': diff[b'phid']})
632 if comment:
663 if comment:
633 transactions.append({b'type': b'comment', b'value': comment})
664 transactions.append({b'type': b'comment', b'value': comment})
634 else:
665 else:
635 # Even if we don't need to upload a new diff because the patch content
666 # Even if we don't need to upload a new diff because the patch content
636 # does not change, we might still need to update its metadata so
667 # does not change, we might still need to update its metadata so
637 # pushers could know the correct node metadata.
668 # pushers could know the correct node metadata.
638 assert olddiff
669 assert olddiff
639 diff = olddiff
670 diff = olddiff
640 writediffproperties(ctx, diff)
671 writediffproperties(ctx, diff)
641
672
642 # Set the parent Revision every time, so commit re-ordering is picked-up
673 # Set the parent Revision every time, so commit re-ordering is picked-up
643 if parentrevphid:
674 if parentrevphid:
644 transactions.append(
675 transactions.append(
645 {b'type': b'parents.set', b'value': [parentrevphid]}
676 {b'type': b'parents.set', b'value': [parentrevphid]}
646 )
677 )
647
678
648 if actions:
679 if actions:
649 transactions += actions
680 transactions += actions
650
681
651 # Parse commit message and update related fields.
682 # Parse commit message and update related fields.
652 desc = ctx.description()
683 desc = ctx.description()
653 info = callconduit(
684 info = callconduit(
654 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
685 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
655 )
686 )
656 for k, v in info[b'fields'].items():
687 for k, v in info[b'fields'].items():
657 if k in [b'title', b'summary', b'testPlan']:
688 if k in [b'title', b'summary', b'testPlan']:
658 transactions.append({b'type': k, b'value': v})
689 transactions.append({b'type': k, b'value': v})
659
690
660 params = {b'transactions': transactions}
691 params = {b'transactions': transactions}
661 if revid is not None:
692 if revid is not None:
662 # Update an existing Differential Revision
693 # Update an existing Differential Revision
663 params[b'objectIdentifier'] = revid
694 params[b'objectIdentifier'] = revid
664
695
665 revision = callconduit(repo.ui, b'differential.revision.edit', params)
696 revision = callconduit(repo.ui, b'differential.revision.edit', params)
666 if not revision:
697 if not revision:
667 raise error.Abort(_(b'cannot create revision for %s') % ctx)
698 raise error.Abort(_(b'cannot create revision for %s') % ctx)
668
699
669 return revision, diff
700 return revision, diff
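# Editor's example of a transaction list as built above (illustrative values,
# not part of this change):
#
#   [
#       {b'type': b'update', b'value': diff[b'phid']},
#       {b'type': b'parents.set', b'value': [parentrevphid]},
#       {b'type': b'title', b'value': b'phabricator: add the maketext function'},
#   ]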
670
701
671
702
672 def userphids(repo, names):
703 def userphids(repo, names):
673 """convert user names to PHIDs"""
704 """convert user names to PHIDs"""
674 names = [name.lower() for name in names]
705 names = [name.lower() for name in names]
675 query = {b'constraints': {b'usernames': names}}
706 query = {b'constraints': {b'usernames': names}}
676 result = callconduit(repo.ui, b'user.search', query)
707 result = callconduit(repo.ui, b'user.search', query)
677 # username not found is not an error of the API. So check if we have missed
708 # username not found is not an error of the API. So check if we have missed
678 # some names here.
709 # some names here.
679 data = result[b'data']
710 data = result[b'data']
680 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
711 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
681 unresolved = set(names) - resolved
712 unresolved = set(names) - resolved
682 if unresolved:
713 if unresolved:
683 raise error.Abort(
714 raise error.Abort(
684 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
715 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
685 )
716 )
686 return [entry[b'phid'] for entry in data]
717 return [entry[b'phid'] for entry in data]
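# Editor's sketch (illustrative, not part of this change; usernames and PHIDs
# are placeholders):
#
#   userphids(repo, [b'alice', b'Bob'])
#   # -> [b'PHID-USER-aaaa', b'PHID-USER-bbbb'], aborting instead if any name
#   #    is unknown to the server.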
687
718
688
719
689 @vcrcommand(
720 @vcrcommand(
690 b'phabsend',
721 b'phabsend',
691 [
722 [
692 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
723 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
693 (b'', b'amend', True, _(b'update commit messages')),
724 (b'', b'amend', True, _(b'update commit messages')),
694 (b'', b'reviewer', [], _(b'specify reviewers')),
725 (b'', b'reviewer', [], _(b'specify reviewers')),
695 (b'', b'blocker', [], _(b'specify blocking reviewers')),
726 (b'', b'blocker', [], _(b'specify blocking reviewers')),
696 (
727 (
697 b'm',
728 b'm',
698 b'comment',
729 b'comment',
699 b'',
730 b'',
700 _(b'add a comment to Revisions with new/updated Diffs'),
731 _(b'add a comment to Revisions with new/updated Diffs'),
701 ),
732 ),
702 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
733 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
703 ],
734 ],
704 _(b'REV [OPTIONS]'),
735 _(b'REV [OPTIONS]'),
705 helpcategory=command.CATEGORY_IMPORT_EXPORT,
736 helpcategory=command.CATEGORY_IMPORT_EXPORT,
706 )
737 )
707 def phabsend(ui, repo, *revs, **opts):
738 def phabsend(ui, repo, *revs, **opts):
708 """upload changesets to Phabricator
739 """upload changesets to Phabricator
709
740
710 If there are multiple revisions specified, they will be sent as a stack
741 If there are multiple revisions specified, they will be sent as a stack
711 with a linear dependency relationship using the order specified by the
742 with a linear dependency relationship using the order specified by the
712 revset.
743 revset.
713
744
714 When uploading changesets for the first time, local tags will be created to
745 When uploading changesets for the first time, local tags will be created to
715 maintain the association. After the first time, phabsend will check
746 maintain the association. After the first time, phabsend will check
716 obsstore and tags information so it can figure out whether to update an
747 obsstore and tags information so it can figure out whether to update an
717 existing Differential Revision, or create a new one.
748 existing Differential Revision, or create a new one.
718
749
719 If --amend is set, update commit messages so they have the
750 If --amend is set, update commit messages so they have the
720 ``Differential Revision`` URL and remove related tags. This is similar to what
751 ``Differential Revision`` URL and remove related tags. This is similar to what
721 arcanist will do, and is preferred in author-push workflows. Otherwise,
752 arcanist will do, and is preferred in author-push workflows. Otherwise,
722 use local tags to record the ``Differential Revision`` association.
753 use local tags to record the ``Differential Revision`` association.
723
754
724 The --confirm option lets you confirm changesets before sending them. You
755 The --confirm option lets you confirm changesets before sending them. You
725 can also add following to your configuration file to make it default
756 can also add following to your configuration file to make it default
726 behaviour::
757 behaviour::
727
758
728 [phabsend]
759 [phabsend]
729 confirm = true
760 confirm = true
730
761
731 phabsend will check obsstore and the above association to decide whether to
762 phabsend will check obsstore and the above association to decide whether to
732 update an existing Differential Revision, or create a new one.
763 update an existing Differential Revision, or create a new one.
733 """
764 """
734 opts = pycompat.byteskwargs(opts)
765 opts = pycompat.byteskwargs(opts)
735 revs = list(revs) + opts.get(b'rev', [])
766 revs = list(revs) + opts.get(b'rev', [])
736 revs = scmutil.revrange(repo, revs)
767 revs = scmutil.revrange(repo, revs)
737
768
738 if not revs:
769 if not revs:
739 raise error.Abort(_(b'phabsend requires at least one changeset'))
770 raise error.Abort(_(b'phabsend requires at least one changeset'))
740 if opts.get(b'amend'):
771 if opts.get(b'amend'):
741 cmdutil.checkunfinished(repo)
772 cmdutil.checkunfinished(repo)
742
773
743 # {newnode: (oldnode, olddiff, olddrev}
774 # {newnode: (oldnode, olddiff, olddrev}
744 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
775 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
745
776
746 confirm = ui.configbool(b'phabsend', b'confirm')
777 confirm = ui.configbool(b'phabsend', b'confirm')
747 confirm |= bool(opts.get(b'confirm'))
778 confirm |= bool(opts.get(b'confirm'))
748 if confirm:
779 if confirm:
749 confirmed = _confirmbeforesend(repo, revs, oldmap)
780 confirmed = _confirmbeforesend(repo, revs, oldmap)
750 if not confirmed:
781 if not confirmed:
751 raise error.Abort(_(b'phabsend cancelled'))
782 raise error.Abort(_(b'phabsend cancelled'))
752
783
753 actions = []
784 actions = []
754 reviewers = opts.get(b'reviewer', [])
785 reviewers = opts.get(b'reviewer', [])
755 blockers = opts.get(b'blocker', [])
786 blockers = opts.get(b'blocker', [])
756 phids = []
787 phids = []
757 if reviewers:
788 if reviewers:
758 phids.extend(userphids(repo, reviewers))
789 phids.extend(userphids(repo, reviewers))
759 if blockers:
790 if blockers:
760 phids.extend(
791 phids.extend(
761 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
792 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
762 )
793 )
763 if phids:
794 if phids:
764 actions.append({b'type': b'reviewers.add', b'value': phids})
795 actions.append({b'type': b'reviewers.add', b'value': phids})
765
796
766 drevids = [] # [int]
797 drevids = [] # [int]
767 diffmap = {} # {newnode: diff}
798 diffmap = {} # {newnode: diff}
768
799
769 # Send patches one by one so we know their Differential Revision PHIDs and
800 # Send patches one by one so we know their Differential Revision PHIDs and
770 # can provide dependency relationship
801 # can provide dependency relationship
771 lastrevphid = None
802 lastrevphid = None
772 for rev in revs:
803 for rev in revs:
773 ui.debug(b'sending rev %d\n' % rev)
804 ui.debug(b'sending rev %d\n' % rev)
774 ctx = repo[rev]
805 ctx = repo[rev]
775
806
776 # Get Differential Revision ID
807 # Get Differential Revision ID
777 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
808 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
778 if oldnode != ctx.node() or opts.get(b'amend'):
809 if oldnode != ctx.node() or opts.get(b'amend'):
779 # Create or update Differential Revision
810 # Create or update Differential Revision
780 revision, diff = createdifferentialrevision(
811 revision, diff = createdifferentialrevision(
781 ctx,
812 ctx,
782 revid,
813 revid,
783 lastrevphid,
814 lastrevphid,
784 oldnode,
815 oldnode,
785 olddiff,
816 olddiff,
786 actions,
817 actions,
787 opts.get(b'comment'),
818 opts.get(b'comment'),
788 )
819 )
789 diffmap[ctx.node()] = diff
820 diffmap[ctx.node()] = diff
790 newrevid = int(revision[b'object'][b'id'])
821 newrevid = int(revision[b'object'][b'id'])
791 newrevphid = revision[b'object'][b'phid']
822 newrevphid = revision[b'object'][b'phid']
792 if revid:
823 if revid:
793 action = b'updated'
824 action = b'updated'
794 else:
825 else:
795 action = b'created'
826 action = b'created'
796
827
797 # Create a local tag to note the association, if commit message
828 # Create a local tag to note the association, if commit message
798 # does not have it already
829 # does not have it already
799 m = _differentialrevisiondescre.search(ctx.description())
830 m = _differentialrevisiondescre.search(ctx.description())
800 if not m or int(m.group(r'id')) != newrevid:
831 if not m or int(m.group(r'id')) != newrevid:
801 tagname = b'D%d' % newrevid
832 tagname = b'D%d' % newrevid
802 tags.tag(
833 tags.tag(
803 repo,
834 repo,
804 tagname,
835 tagname,
805 ctx.node(),
836 ctx.node(),
806 message=None,
837 message=None,
807 user=None,
838 user=None,
808 date=None,
839 date=None,
809 local=True,
840 local=True,
810 )
841 )
811 else:
842 else:
812 # Nothing changed. But still set "newrevphid" so the next revision
843 # Nothing changed. But still set "newrevphid" so the next revision
813 # could depend on this one and "newrevid" for the summary line.
844 # could depend on this one and "newrevid" for the summary line.
814 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
845 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
815 newrevid = revid
846 newrevid = revid
816 action = b'skipped'
847 action = b'skipped'
817
848
818 actiondesc = ui.label(
849 actiondesc = ui.label(
819 {
850 {
820 b'created': _(b'created'),
851 b'created': _(b'created'),
821 b'skipped': _(b'skipped'),
852 b'skipped': _(b'skipped'),
822 b'updated': _(b'updated'),
853 b'updated': _(b'updated'),
823 }[action],
854 }[action],
824 b'phabricator.action.%s' % action,
855 b'phabricator.action.%s' % action,
825 )
856 )
826 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
857 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
827 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
858 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
828 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
859 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
829 ui.write(
860 ui.write(
830 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
861 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
831 )
862 )
832 drevids.append(newrevid)
863 drevids.append(newrevid)
833 lastrevphid = newrevphid
864 lastrevphid = newrevphid
834
865
835 # Update commit messages and remove tags
866 # Update commit messages and remove tags
836 if opts.get(b'amend'):
867 if opts.get(b'amend'):
837 unfi = repo.unfiltered()
868 unfi = repo.unfiltered()
838 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
869 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
839 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
870 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
840 wnode = unfi[b'.'].node()
871 wnode = unfi[b'.'].node()
841 mapping = {} # {oldnode: [newnode]}
872 mapping = {} # {oldnode: [newnode]}
842 for i, rev in enumerate(revs):
873 for i, rev in enumerate(revs):
843 old = unfi[rev]
874 old = unfi[rev]
844 drevid = drevids[i]
875 drevid = drevids[i]
845 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
876 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
846 newdesc = getdescfromdrev(drev)
877 newdesc = getdescfromdrev(drev)
847 # Make sure the commit message contains "Differential Revision"
878 # Make sure the commit message contains "Differential Revision"
848 if old.description() != newdesc:
879 if old.description() != newdesc:
849 if old.phase() == phases.public:
880 if old.phase() == phases.public:
850 ui.warn(
881 ui.warn(
851 _(b"warning: not updating public commit %s\n")
882 _(b"warning: not updating public commit %s\n")
852 % scmutil.formatchangeid(old)
883 % scmutil.formatchangeid(old)
853 )
884 )
854 continue
885 continue
855 parents = [
886 parents = [
856 mapping.get(old.p1().node(), (old.p1(),))[0],
887 mapping.get(old.p1().node(), (old.p1(),))[0],
857 mapping.get(old.p2().node(), (old.p2(),))[0],
888 mapping.get(old.p2().node(), (old.p2(),))[0],
858 ]
889 ]
859 new = context.metadataonlyctx(
890 new = context.metadataonlyctx(
860 repo,
891 repo,
861 old,
892 old,
862 parents=parents,
893 parents=parents,
863 text=newdesc,
894 text=newdesc,
864 user=old.user(),
895 user=old.user(),
865 date=old.date(),
896 date=old.date(),
866 extra=old.extra(),
897 extra=old.extra(),
867 )
898 )
868
899
869 newnode = new.commit()
900 newnode = new.commit()
870
901
871 mapping[old.node()] = [newnode]
902 mapping[old.node()] = [newnode]
872 # Update diff property
903 # Update diff property
873 # If it fails just warn and keep going, otherwise the DREV
904 # If it fails just warn and keep going, otherwise the DREV
874 # associations will be lost
905 # associations will be lost
875 try:
906 try:
876 writediffproperties(unfi[newnode], diffmap[old.node()])
907 writediffproperties(unfi[newnode], diffmap[old.node()])
877 except util.urlerr.urlerror:
908 except util.urlerr.urlerror:
878 ui.warnnoi18n(
909 ui.warnnoi18n(
879 b'Failed to update metadata for D%s\n' % drevid
910 b'Failed to update metadata for D%s\n' % drevid
880 )
911 )
881 # Remove the local tag since it's no longer necessary
912 # Remove the local tag since it's no longer necessary
882 tagname = b'D%d' % drevid
913 tagname = b'D%d' % drevid
883 if tagname in repo.tags():
914 if tagname in repo.tags():
884 tags.tag(
915 tags.tag(
885 repo,
916 repo,
886 tagname,
917 tagname,
887 nullid,
918 nullid,
888 message=None,
919 message=None,
889 user=None,
920 user=None,
890 date=None,
921 date=None,
891 local=True,
922 local=True,
892 )
923 )
893 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
924 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
894 if wnode in mapping:
925 if wnode in mapping:
895 unfi.setparents(mapping[wnode][0])
926 unfi.setparents(mapping[wnode][0])


# Map from "hg:meta" keys to headers understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
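
# With the mapping above, readpatch() below composes patch headers such as
# the following for a diff carrying the "hg:meta" shown in getdiffmeta()
# (illustrative values):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16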


def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
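
# The confirmation output is roughly of the form (illustrative values):
#
#   D123 - 1a2b3c4d5e6f: fix the frobnicator
#   NEW - 9f8e7d6c5b4a: add frobnicator tests
#   Send the above changes to https://phab.example.com/ (yn)? y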


_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}


def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()
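
# For example, a revision whose statusName is "Needs Review" normalizes to
# b'needsreview', matching the corresponding entry in _knownstatusnames.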


# Small language to specify differential revisions. Supported symbols: (),
# :X, +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}


def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)


def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
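
# As a rough illustration, parsing the spec b':D6+8' yields a tree shaped
# like:
#
#   (b'add', (b'ancestors', (b'symbol', b'D6')), (b'symbol', b'8'))
#
# i.e. the stack up to D6, plus the single revision D8.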


def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)


def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs


def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
                "3",
                "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
                "phabricator:projects": [],
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
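
# Illustrative usage:
#
#   drevs = querydrev(repo, b':D6+8-(2+D4)')
#   for drev in drevs:
#       repo.ui.write(b'D%s %s\n' % (drev[b'id'], drev[b'statusName']))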


def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
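
# For a drev titled "example" with an empty summary, a test plan of
# "ran tests", and uri "https://phab.example.com/D2", the composed message
# would be (illustrative):
#
#   example
#
#   Test Plan:
#   ran tests
#
#   Differential Revision: https://phab.example.com/D2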
1165
1196
1166
1197
1167 def getdiffmeta(diff):
1198 def getdiffmeta(diff):
1168 """get commit metadata (date, node, user, p1) from a diff object
1199 """get commit metadata (date, node, user, p1) from a diff object
1169
1200
1170 The metadata could be "hg:meta", sent by phabsend, like:
1201 The metadata could be "hg:meta", sent by phabsend, like:
1171
1202
1172 "properties": {
1203 "properties": {
1173 "hg:meta": {
1204 "hg:meta": {
1174 "date": "1499571514 25200",
1205 "date": "1499571514 25200",
1175 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1206 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1176 "user": "Foo Bar <foo@example.com>",
1207 "user": "Foo Bar <foo@example.com>",
1177 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1208 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1178 }
1209 }
1179 }
1210 }
1180
1211
1181 Or converted from "local:commits", sent by "arc", like:
1212 Or converted from "local:commits", sent by "arc", like:
1182
1213
1183 "properties": {
1214 "properties": {
1184 "local:commits": {
1215 "local:commits": {
1185 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1216 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1186 "author": "Foo Bar",
1217 "author": "Foo Bar",
1187 "time": 1499546314,
1218 "time": 1499546314,
1188 "branch": "default",
1219 "branch": "default",
1189 "tag": "",
1220 "tag": "",
1190 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1221 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1191 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1222 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1192 "local": "1000",
1223 "local": "1000",
1193 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1224 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1194 "summary": "...",
1225 "summary": "...",
1195 "message": "...",
1226 "message": "...",
1196 "authorEmail": "foo@example.com"
1227 "authorEmail": "foo@example.com"
1197 }
1228 }
1198 }
1229 }
1199 }
1230 }
1200
1231
1201 Note: metadata extracted from "local:commits" will lose time zone
1232 Note: metadata extracted from "local:commits" will lose time zone
1202 information.
1233 information.
1203 """
1234 """
1204 props = diff.get(b'properties') or {}
1235 props = diff.get(b'properties') or {}
1205 meta = props.get(b'hg:meta')
1236 meta = props.get(b'hg:meta')
1206 if not meta:
1237 if not meta:
1207 if props.get(b'local:commits'):
1238 if props.get(b'local:commits'):
1208 commit = sorted(props[b'local:commits'].values())[0]
1239 commit = sorted(props[b'local:commits'].values())[0]
1209 meta = {}
1240 meta = {}
1210 if b'author' in commit and b'authorEmail' in commit:
1241 if b'author' in commit and b'authorEmail' in commit:
1211 meta[b'user'] = b'%s <%s>' % (
1242 meta[b'user'] = b'%s <%s>' % (
1212 commit[b'author'],
1243 commit[b'author'],
1213 commit[b'authorEmail'],
1244 commit[b'authorEmail'],
1214 )
1245 )
1215 if b'time' in commit:
1246 if b'time' in commit:
1216 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1247 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1217 if b'branch' in commit:
1248 if b'branch' in commit:
1218 meta[b'branch'] = commit[b'branch']
1249 meta[b'branch'] = commit[b'branch']
1219 node = commit.get(b'commit', commit.get(b'rev'))
1250 node = commit.get(b'commit', commit.get(b'rev'))
1220 if node:
1251 if node:
1221 meta[b'node'] = node
1252 meta[b'node'] = node
1222 if len(commit.get(b'parents', ())) >= 1:
1253 if len(commit.get(b'parents', ())) >= 1:
1223 meta[b'parent'] = commit[b'parents'][0]
1254 meta[b'parent'] = commit[b'parents'][0]
1224 else:
1255 else:
1225 meta = {}
1256 meta = {}
1226 if b'date' not in meta and b'dateCreated' in diff:
1257 if b'date' not in meta and b'dateCreated' in diff:
1227 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1258 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1228 if b'branch' not in meta and diff.get(b'branch'):
1259 if b'branch' not in meta and diff.get(b'branch'):
1229 meta[b'branch'] = diff[b'branch']
1260 meta[b'branch'] = diff[b'branch']
1230 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1261 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1231 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1262 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1232 return meta
1263 return meta
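
# Using the "local:commits" example from the docstring above, the returned
# meta dict would roughly be:
#
#   {
#       b'user': b'Foo Bar <foo@example.com>',
#       b'date': b'1499546314 0',
#       b'branch': b'default',
#       b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#       b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
#   }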


def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)


@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identifier, like ``D123``, or
    just the number ``123``. It could also have common operators like ``+``,
    ``-``, ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` could be
    used to select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
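
# Illustrative invocations:
#
#   hg phabread D123              # print D123 as a patch
#   hg phabread ':D6+8-(2+D4)'    # stack up to D6, plus D8, minus D2 and D4
#   hg phabread --stack D123      # equivalent to ':D123'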


@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
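
# Illustrative example: accept a whole stack and leave a comment on its top
# revision:
#
#   hg phabupdate --accept ':D123' -m 'queued for landing'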


@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
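
# Illustrative template usage:
#
#   hg log -r . -T '{phabreview.url}\n'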