phabricator: add makebinary and addoldbinary functions...
Ian Moody
r43459:9f802243 default
@@ -1,1457 +1,1495 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import operator
50 import operator
50 import re
51 import re
51
52
52 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
53 from mercurial.i18n import _
54 from mercurial.i18n import _
54 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
55 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
56 from mercurial import (
57 from mercurial import (
57 cmdutil,
58 cmdutil,
58 context,
59 context,
59 encoding,
60 encoding,
60 error,
61 error,
61 exthelper,
62 exthelper,
62 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
63 match,
64 match,
64 mdiff,
65 mdiff,
65 obsutil,
66 obsutil,
66 parser,
67 parser,
67 patch,
68 patch,
68 phases,
69 phases,
69 pycompat,
70 pycompat,
70 scmutil,
71 scmutil,
71 smartset,
72 smartset,
72 tags,
73 tags,
73 templatefilters,
74 templatefilters,
74 templateutil,
75 templateutil,
75 url as urlmod,
76 url as urlmod,
76 util,
77 util,
77 )
78 )
78 from mercurial.utils import (
79 from mercurial.utils import (
79 procutil,
80 procutil,
80 stringutil,
81 stringutil,
81 )
82 )
82
83
83 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # leave the attribute unspecified.
87 # leave the attribute unspecified.
87 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
88
89
89 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
90
91
91 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
92 command = eh.command
93 command = eh.command
93 configtable = eh.configtable
94 configtable = eh.configtable
94 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
95
96
96 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
97 eh.configitem(
98 eh.configitem(
98 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
99 )
100 )
100 eh.configitem(
101 eh.configitem(
101 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
102 )
103 )
103 eh.configitem(
104 eh.configitem(
104 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
105 )
106 )
106 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
107 eh.configitem(
108 eh.configitem(
108 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
109 )
110 )
110 eh.configitem(
111 eh.configitem(
111 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
112 )
113 )
113 eh.configitem(
114 eh.configitem(
114 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
115 )
116 )
116
117
117 colortable = {
118 colortable = {
118 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
122 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
123 b'phabricator.node': b'',
124 b'phabricator.node': b'',
124 }
125 }
125
126
126 _VCR_FLAGS = [
127 _VCR_FLAGS = [
127 (
128 (
128 b'',
129 b'',
129 b'test-vcr',
130 b'test-vcr',
130 b'',
131 b'',
131 _(
132 _(
132 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b' (ADVANCED)'
135 b' (ADVANCED)'
135 ),
136 ),
136 ),
137 ),
137 ]
138 ]
138
139
139
140
140 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
141 fullflags = flags + _VCR_FLAGS
142 fullflags = flags + _VCR_FLAGS
142
143
143 def hgmatcher(r1, r2):
144 def hgmatcher(r1, r2):
144 if r1.uri != r2.uri or r1.method != r2.method:
145 if r1.uri != r2.uri or r1.method != r2.method:
145 return False
146 return False
146 r1params = r1.body.split(b'&')
147 r1params = r1.body.split(b'&')
147 r2params = r2.body.split(b'&')
148 r2params = r2.body.split(b'&')
148 return set(r1params) == set(r2params)
149 return set(r1params) == set(r2params)
149
150
150 def sanitiserequest(request):
151 def sanitiserequest(request):
151 request.body = re.sub(
152 request.body = re.sub(
152 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
153 )
154 )
154 return request
155 return request
155
156
156 def sanitiseresponse(response):
157 def sanitiseresponse(response):
157 if r'set-cookie' in response[r'headers']:
158 if r'set-cookie' in response[r'headers']:
158 del response[r'headers'][r'set-cookie']
159 del response[r'headers'][r'set-cookie']
159 return response
160 return response
160
161
161 def decorate(fn):
162 def decorate(fn):
162 def inner(*args, **kwargs):
163 def inner(*args, **kwargs):
163 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
164 if cassette:
165 if cassette:
165 import hgdemandimport
166 import hgdemandimport
166
167
167 with hgdemandimport.deactivated():
168 with hgdemandimport.deactivated():
168 import vcr as vcrmod
169 import vcr as vcrmod
169 import vcr.stubs as stubs
170 import vcr.stubs as stubs
170
171
171 vcr = vcrmod.VCR(
172 vcr = vcrmod.VCR(
172 serializer=r'json',
173 serializer=r'json',
173 before_record_request=sanitiserequest,
174 before_record_request=sanitiserequest,
174 before_record_response=sanitiseresponse,
175 before_record_response=sanitiseresponse,
175 custom_patches=[
176 custom_patches=[
176 (
177 (
177 urlmod,
178 urlmod,
178 r'httpconnection',
179 r'httpconnection',
179 stubs.VCRHTTPConnection,
180 stubs.VCRHTTPConnection,
180 ),
181 ),
181 (
182 (
182 urlmod,
183 urlmod,
183 r'httpsconnection',
184 r'httpsconnection',
184 stubs.VCRHTTPSConnection,
185 stubs.VCRHTTPSConnection,
185 ),
186 ),
186 ],
187 ],
187 )
188 )
188 vcr.register_matcher(r'hgmatcher', hgmatcher)
189 vcr.register_matcher(r'hgmatcher', hgmatcher)
189 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
190 return fn(*args, **kwargs)
191 return fn(*args, **kwargs)
191 return fn(*args, **kwargs)
192 return fn(*args, **kwargs)
192
193
193 inner.__name__ = fn.__name__
194 inner.__name__ = fn.__name__
194 inner.__doc__ = fn.__doc__
195 inner.__doc__ = fn.__doc__
195 return command(
196 return command(
196 name,
197 name,
197 fullflags,
198 fullflags,
198 spec,
199 spec,
199 helpcategory=helpcategory,
200 helpcategory=helpcategory,
200 optionalrepo=optionalrepo,
201 optionalrepo=optionalrepo,
201 )(inner)
202 )(inner)
202
203
203 return decorate
204 return decorate
204
205
205
206
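(Illustrative note: the ``--test-vcr`` flag that ``vcrcommand`` adds records or replays Conduit HTTP traffic for tests. A hypothetical invocation, assuming the ``vcr`` package is installed and ``phabsend-test.json`` is a transcript path of your choosing::

    hg phabsend -r . --test-vcr phabsend-test.json

If the file does not exist a new transcript is recorded; otherwise all HTTP requests are mocked from it.)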
206 def urlencodenested(params):
207 def urlencodenested(params):
207 """like urlencode, but works with nested parameters.
208 """like urlencode, but works with nested parameters.
208
209
209 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
210 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
211 urlencode. Note: the encoding is consistent with PHP's http_build_query.
212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
212 """
213 """
213 flatparams = util.sortdict()
214 flatparams = util.sortdict()
214
215
215 def process(prefix, obj):
216 def process(prefix, obj):
216 if isinstance(obj, bool):
217 if isinstance(obj, bool):
217 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
218 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
219 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
220 if items is None:
221 if items is None:
221 flatparams[prefix] = obj
222 flatparams[prefix] = obj
222 else:
223 else:
223 for k, v in items(obj):
224 for k, v in items(obj):
224 if prefix:
225 if prefix:
225 process(b'%s[%s]' % (prefix, k), v)
226 process(b'%s[%s]' % (prefix, k), v)
226 else:
227 else:
227 process(k, v)
228 process(k, v)
228
229
229 process(b'', params)
230 process(b'', params)
230 return util.urlreq.urlencode(flatparams)
231 return util.urlreq.urlencode(flatparams)
231
232
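(For example, per the docstring above, a call like::

    urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})

flattens the input to ``{'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}`` and urlencodes it to ``a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f``, i.e. ``a[0]=b&a[1]=c&d[e]=f`` with the brackets percent-encoded, matching PHP's http_build_query.)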
232
233
233 def readurltoken(ui):
234 def readurltoken(ui):
234 """return conduit url, token and make sure they exist
235 """return conduit url, token and make sure they exist
235
236
236 Currently read from [auth] config section. In the future, it might
237 Currently read from [auth] config section. In the future, it might
237 make sense to read from .arcconfig and .arcrc as well.
238 make sense to read from .arcconfig and .arcrc as well.
238 """
239 """
239 url = ui.config(b'phabricator', b'url')
240 url = ui.config(b'phabricator', b'url')
240 if not url:
241 if not url:
241 raise error.Abort(
242 raise error.Abort(
242 _(b'config %s.%s is required') % (b'phabricator', b'url')
243 _(b'config %s.%s is required') % (b'phabricator', b'url')
243 )
244 )
244
245
245 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
246 token = None
247 token = None
247
248
248 if res:
249 if res:
249 group, auth = res
250 group, auth = res
250
251
251 ui.debug(b"using auth.%s.* for authentication\n" % group)
252 ui.debug(b"using auth.%s.* for authentication\n" % group)
252
253
253 token = auth.get(b'phabtoken')
254 token = auth.get(b'phabtoken')
254
255
255 if not token:
256 if not token:
256 raise error.Abort(
257 raise error.Abort(
257 _(b'Can\'t find conduit token associated to %s') % (url,)
258 _(b'Can\'t find conduit token associated to %s') % (url,)
258 )
259 )
259
260
260 return url, token
261 return url, token
261
262
262
263
263 def callconduit(ui, name, params):
264 def callconduit(ui, name, params):
264 """call Conduit API, params is a dict. return json.loads result, or None"""
265 """call Conduit API, params is a dict. return json.loads result, or None"""
265 host, token = readurltoken(ui)
266 host, token = readurltoken(ui)
266 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
267 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
268 params = params.copy()
269 params = params.copy()
269 params[b'api.token'] = token
270 params[b'api.token'] = token
270 data = urlencodenested(params)
271 data = urlencodenested(params)
271 curlcmd = ui.config(b'phabricator', b'curlcmd')
272 curlcmd = ui.config(b'phabricator', b'curlcmd')
272 if curlcmd:
273 if curlcmd:
273 sin, sout = procutil.popen2(
274 sin, sout = procutil.popen2(
274 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
275 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
275 )
276 )
276 sin.write(data)
277 sin.write(data)
277 sin.close()
278 sin.close()
278 body = sout.read()
279 body = sout.read()
279 else:
280 else:
280 urlopener = urlmod.opener(ui, authinfo)
281 urlopener = urlmod.opener(ui, authinfo)
281 request = util.urlreq.request(pycompat.strurl(url), data=data)
282 request = util.urlreq.request(pycompat.strurl(url), data=data)
282 with contextlib.closing(urlopener.open(request)) as rsp:
283 with contextlib.closing(urlopener.open(request)) as rsp:
283 body = rsp.read()
284 body = rsp.read()
284 ui.debug(b'Conduit Response: %s\n' % body)
285 ui.debug(b'Conduit Response: %s\n' % body)
285 parsed = pycompat.rapply(
286 parsed = pycompat.rapply(
286 lambda x: encoding.unitolocal(x)
287 lambda x: encoding.unitolocal(x)
287 if isinstance(x, pycompat.unicode)
288 if isinstance(x, pycompat.unicode)
288 else x,
289 else x,
289 # json.loads only accepts bytes from py3.6+
290 # json.loads only accepts bytes from py3.6+
290 json.loads(encoding.unifromlocal(body)),
291 json.loads(encoding.unifromlocal(body)),
291 )
292 )
292 if parsed.get(b'error_code'):
293 if parsed.get(b'error_code'):
293 msg = _(b'Conduit Error (%s): %s') % (
294 msg = _(b'Conduit Error (%s): %s') % (
294 parsed[b'error_code'],
295 parsed[b'error_code'],
295 parsed[b'error_info'],
296 parsed[b'error_info'],
296 )
297 )
297 raise error.Abort(msg)
298 raise error.Abort(msg)
298 return parsed[b'result']
299 return parsed[b'result']
299
300
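(A minimal sketch of a Conduit call, mirroring what ``getrepophid`` below does::

    # look up the repository record for callsign FOO
    data = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [b'FOO']}},
    )

The return value is the decoded ``result`` field of the Conduit response, with unicode strings already converted back to local byte strings.)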
300
301
301 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
302 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
302 def debugcallconduit(ui, repo, name):
303 def debugcallconduit(ui, repo, name):
303 """call Conduit API
304 """call Conduit API
304
305
305 Call parameters are read from stdin as a JSON blob. Result will be written
306 Call parameters are read from stdin as a JSON blob. Result will be written
306 to stdout as a JSON blob.
307 to stdout as a JSON blob.
307 """
308 """
308 # json.loads only accepts bytes from 3.6+
309 # json.loads only accepts bytes from 3.6+
309 rawparams = encoding.unifromlocal(ui.fin.read())
310 rawparams = encoding.unifromlocal(ui.fin.read())
310 # json.loads only returns unicode strings
311 # json.loads only returns unicode strings
311 params = pycompat.rapply(
312 params = pycompat.rapply(
312 lambda x: encoding.unitolocal(x)
313 lambda x: encoding.unitolocal(x)
313 if isinstance(x, pycompat.unicode)
314 if isinstance(x, pycompat.unicode)
314 else x,
315 else x,
315 json.loads(rawparams),
316 json.loads(rawparams),
316 )
317 )
317 # json.dumps only accepts unicode strings
318 # json.dumps only accepts unicode strings
318 result = pycompat.rapply(
319 result = pycompat.rapply(
319 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
320 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
320 callconduit(ui, name, params),
321 callconduit(ui, name, params),
321 )
322 )
322 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
323 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
323 ui.write(b'%s\n' % encoding.unitolocal(s))
324 ui.write(b'%s\n' % encoding.unitolocal(s))
324
325
325
326
326 def getrepophid(repo):
327 def getrepophid(repo):
327 """given callsign, return repository PHID or None"""
328 """given callsign, return repository PHID or None"""
328 # developer config: phabricator.repophid
329 # developer config: phabricator.repophid
329 repophid = repo.ui.config(b'phabricator', b'repophid')
330 repophid = repo.ui.config(b'phabricator', b'repophid')
330 if repophid:
331 if repophid:
331 return repophid
332 return repophid
332 callsign = repo.ui.config(b'phabricator', b'callsign')
333 callsign = repo.ui.config(b'phabricator', b'callsign')
333 if not callsign:
334 if not callsign:
334 return None
335 return None
335 query = callconduit(
336 query = callconduit(
336 repo.ui,
337 repo.ui,
337 b'diffusion.repository.search',
338 b'diffusion.repository.search',
338 {b'constraints': {b'callsigns': [callsign]}},
339 {b'constraints': {b'callsigns': [callsign]}},
339 )
340 )
340 if len(query[b'data']) == 0:
341 if len(query[b'data']) == 0:
341 return None
342 return None
342 repophid = query[b'data'][0][b'phid']
343 repophid = query[b'data'][0][b'phid']
343 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
344 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
344 return repophid
345 return repophid
345
346
346
347
347 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
348 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
348 _differentialrevisiondescre = re.compile(
349 _differentialrevisiondescre = re.compile(
349 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
350 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
350 )
351 )
351
352
352
353
353 def getoldnodedrevmap(repo, nodelist):
354 def getoldnodedrevmap(repo, nodelist):
354 """find previous nodes that has been sent to Phabricator
355 """find previous nodes that has been sent to Phabricator
355
356
356 return {node: (oldnode, Differential diff, Differential Revision ID)}
357 return {node: (oldnode, Differential diff, Differential Revision ID)}
357 for node in nodelist with known previous sent versions, or associated
358 for node in nodelist with known previous sent versions, or associated
358 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
359 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
359 be ``None``.
360 be ``None``.
360
361
361 Examines commit messages like "Differential Revision:" to get the
362 Examines commit messages like "Differential Revision:" to get the
362 association information.
363 association information.
363
364
364 If such commit message line is not found, examines all precursors and their
365 If such commit message line is not found, examines all precursors and their
365 tags. Tags with format like "D1234" are considered a match and the node
366 tags. Tags with format like "D1234" are considered a match and the node
366 with that tag, and the number after "D" (ex. 1234) will be returned.
367 with that tag, and the number after "D" (ex. 1234) will be returned.
367
368
368 The ``old node``, if not None, is guaranteed to be the last diff of
369 The ``old node``, if not None, is guaranteed to be the last diff of
369 corresponding Differential Revision, and exist in the repo.
370 corresponding Differential Revision, and exist in the repo.
370 """
371 """
371 unfi = repo.unfiltered()
372 unfi = repo.unfiltered()
372 nodemap = unfi.changelog.nodemap
373 nodemap = unfi.changelog.nodemap
373
374
374 result = {} # {node: (oldnode?, lastdiff?, drev)}
375 result = {} # {node: (oldnode?, lastdiff?, drev)}
375 toconfirm = {} # {node: (force, {precnode}, drev)}
376 toconfirm = {} # {node: (force, {precnode}, drev)}
376 for node in nodelist:
377 for node in nodelist:
377 ctx = unfi[node]
378 ctx = unfi[node]
378 # For tags like "D123", put them into "toconfirm" to verify later
379 # For tags like "D123", put them into "toconfirm" to verify later
379 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
380 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
380 for n in precnodes:
381 for n in precnodes:
381 if n in nodemap:
382 if n in nodemap:
382 for tag in unfi.nodetags(n):
383 for tag in unfi.nodetags(n):
383 m = _differentialrevisiontagre.match(tag)
384 m = _differentialrevisiontagre.match(tag)
384 if m:
385 if m:
385 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
386 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
386 continue
387 continue
387
388
388 # Check commit message
389 # Check commit message
389 m = _differentialrevisiondescre.search(ctx.description())
390 m = _differentialrevisiondescre.search(ctx.description())
390 if m:
391 if m:
391 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
392 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
392
393
393 # Double check if tags are genuine by collecting all old nodes from
394 # Double check if tags are genuine by collecting all old nodes from
394 # Phabricator, and expect precursors overlap with it.
395 # Phabricator, and expect precursors overlap with it.
395 if toconfirm:
396 if toconfirm:
396 drevs = [drev for force, precs, drev in toconfirm.values()]
397 drevs = [drev for force, precs, drev in toconfirm.values()]
397 alldiffs = callconduit(
398 alldiffs = callconduit(
398 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
399 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
399 )
400 )
400 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
401 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
401 for newnode, (force, precset, drev) in toconfirm.items():
402 for newnode, (force, precset, drev) in toconfirm.items():
402 diffs = [
403 diffs = [
403 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
404 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
404 ]
405 ]
405
406
406 # "precursors" as known by Phabricator
407 # "precursors" as known by Phabricator
407 phprecset = set(getnode(d) for d in diffs)
408 phprecset = set(getnode(d) for d in diffs)
408
409
409 # Ignore if precursors (Phabricator and local repo) do not overlap,
410 # Ignore if precursors (Phabricator and local repo) do not overlap,
410 # and force is not set (when commit message says nothing)
411 # and force is not set (when commit message says nothing)
411 if not force and not bool(phprecset & precset):
412 if not force and not bool(phprecset & precset):
412 tagname = b'D%d' % drev
413 tagname = b'D%d' % drev
413 tags.tag(
414 tags.tag(
414 repo,
415 repo,
415 tagname,
416 tagname,
416 nullid,
417 nullid,
417 message=None,
418 message=None,
418 user=None,
419 user=None,
419 date=None,
420 date=None,
420 local=True,
421 local=True,
421 )
422 )
422 unfi.ui.warn(
423 unfi.ui.warn(
423 _(
424 _(
424 b'D%s: local tag removed - does not match '
425 b'D%s: local tag removed - does not match '
425 b'Differential history\n'
426 b'Differential history\n'
426 )
427 )
427 % drev
428 % drev
428 )
429 )
429 continue
430 continue
430
431
431 # Find the last node using Phabricator metadata, and make sure it
432 # Find the last node using Phabricator metadata, and make sure it
432 # exists in the repo
433 # exists in the repo
433 oldnode = lastdiff = None
434 oldnode = lastdiff = None
434 if diffs:
435 if diffs:
435 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
436 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
436 oldnode = getnode(lastdiff)
437 oldnode = getnode(lastdiff)
437 if oldnode and oldnode not in nodemap:
438 if oldnode and oldnode not in nodemap:
438 oldnode = None
439 oldnode = None
439
440
440 result[newnode] = (oldnode, lastdiff, drev)
441 result[newnode] = (oldnode, lastdiff, drev)
441
442
442 return result
443 return result
443
444
444
445
445 def getdiff(ctx, diffopts):
446 def getdiff(ctx, diffopts):
446 """plain-text diff without header (user, commit message, etc)"""
447 """plain-text diff without header (user, commit message, etc)"""
447 output = util.stringio()
448 output = util.stringio()
448 for chunk, _label in patch.diffui(
449 for chunk, _label in patch.diffui(
449 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
450 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
450 ):
451 ):
451 output.write(chunk)
452 output.write(chunk)
452 return output.getvalue()
453 return output.getvalue()
453
454
454
455
455 class DiffChangeType(object):
456 class DiffChangeType(object):
456 ADD = 1
457 ADD = 1
457 CHANGE = 2
458 CHANGE = 2
458 DELETE = 3
459 DELETE = 3
459 MOVE_AWAY = 4
460 MOVE_AWAY = 4
460 COPY_AWAY = 5
461 COPY_AWAY = 5
461 MOVE_HERE = 6
462 MOVE_HERE = 6
462 COPY_HERE = 7
463 COPY_HERE = 7
463 MULTICOPY = 8
464 MULTICOPY = 8
464
465
465
466
466 class DiffFileType(object):
467 class DiffFileType(object):
467 TEXT = 1
468 TEXT = 1
468 IMAGE = 2
469 IMAGE = 2
469 BINARY = 3
470 BINARY = 3
470
471
471
472
472 @attr.s
473 @attr.s
473 class phabhunk(dict):
474 class phabhunk(dict):
474 """Represents a Differential hunk, which is owned by a Differential change
475 """Represents a Differential hunk, which is owned by a Differential change
475 """
476 """
476
477
477 oldOffset = attr.ib(default=0) # camelcase-required
478 oldOffset = attr.ib(default=0) # camelcase-required
478 oldLength = attr.ib(default=0) # camelcase-required
479 oldLength = attr.ib(default=0) # camelcase-required
479 newOffset = attr.ib(default=0) # camelcase-required
480 newOffset = attr.ib(default=0) # camelcase-required
480 newLength = attr.ib(default=0) # camelcase-required
481 newLength = attr.ib(default=0) # camelcase-required
481 corpus = attr.ib(default='')
482 corpus = attr.ib(default='')
482 # These get added to the phabchange's equivalents
483 # These get added to the phabchange's equivalents
483 addLines = attr.ib(default=0) # camelcase-required
484 addLines = attr.ib(default=0) # camelcase-required
484 delLines = attr.ib(default=0) # camelcase-required
485 delLines = attr.ib(default=0) # camelcase-required
485
486
486
487
487 @attr.s
488 @attr.s
488 class phabchange(object):
489 class phabchange(object):
489 """Represents a Differential change, owns Differential hunks and owned by a
490 """Represents a Differential change, owns Differential hunks and owned by a
490 Differential diff. Each one represents one file in a diff.
491 Differential diff. Each one represents one file in a diff.
491 """
492 """
492
493
493 currentPath = attr.ib(default=None) # camelcase-required
494 currentPath = attr.ib(default=None) # camelcase-required
494 oldPath = attr.ib(default=None) # camelcase-required
495 oldPath = attr.ib(default=None) # camelcase-required
495 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
496 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
496 metadata = attr.ib(default=attr.Factory(dict))
497 metadata = attr.ib(default=attr.Factory(dict))
497 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
498 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
498 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
499 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
499 type = attr.ib(default=DiffChangeType.CHANGE)
500 type = attr.ib(default=DiffChangeType.CHANGE)
500 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
501 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
501 commitHash = attr.ib(default=None) # camelcase-required
502 commitHash = attr.ib(default=None) # camelcase-required
502 addLines = attr.ib(default=0) # camelcase-required
503 addLines = attr.ib(default=0) # camelcase-required
503 delLines = attr.ib(default=0) # camelcase-required
504 delLines = attr.ib(default=0) # camelcase-required
504 hunks = attr.ib(default=attr.Factory(list))
505 hunks = attr.ib(default=attr.Factory(list))
505
506
506 def copynewmetadatatoold(self):
507 def copynewmetadatatoold(self):
507 for key in list(self.metadata.keys()):
508 for key in list(self.metadata.keys()):
508 newkey = key.replace(b'new:', b'old:')
509 newkey = key.replace(b'new:', b'old:')
509 self.metadata[newkey] = self.metadata[key]
510 self.metadata[newkey] = self.metadata[key]
510
511
511 def addoldmode(self, value):
512 def addoldmode(self, value):
512 self.oldProperties[b'unix:filemode'] = value
513 self.oldProperties[b'unix:filemode'] = value
513
514
514 def addnewmode(self, value):
515 def addnewmode(self, value):
515 self.newProperties[b'unix:filemode'] = value
516 self.newProperties[b'unix:filemode'] = value
516
517
517 def addhunk(self, hunk):
518 def addhunk(self, hunk):
518 if not isinstance(hunk, phabhunk):
519 if not isinstance(hunk, phabhunk):
519 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
520 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
520 self.hunks.append(hunk)
521 self.hunks.append(hunk)
521 # It's useful to include these stats since the Phab web UI shows them,
522 # It's useful to include these stats since the Phab web UI shows them,
522 # and uses them to estimate how large a change a Revision is. Also used
523 # and uses them to estimate how large a change a Revision is. Also used
523 # in email subjects for the [+++--] bit.
524 # in email subjects for the [+++--] bit.
524 self.addLines += hunk.addLines
525 self.addLines += hunk.addLines
525 self.delLines += hunk.delLines
526 self.delLines += hunk.delLines
526
527
527
528
528 @attr.s
529 @attr.s
529 class phabdiff(object):
530 class phabdiff(object):
530 """Represents a Differential diff, owns Differential changes. Corresponds
531 """Represents a Differential diff, owns Differential changes. Corresponds
531 to a commit.
532 to a commit.
532 """
533 """
533
534
534 # Doesn't seem to be any reason to send this (output of uname -n)
535 # Doesn't seem to be any reason to send this (output of uname -n)
535 sourceMachine = attr.ib(default=b'') # camelcase-required
536 sourceMachine = attr.ib(default=b'') # camelcase-required
536 sourcePath = attr.ib(default=b'/') # camelcase-required
537 sourcePath = attr.ib(default=b'/') # camelcase-required
537 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
538 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
538 sourceControlPath = attr.ib(default=b'/') # camelcase-required
539 sourceControlPath = attr.ib(default=b'/') # camelcase-required
539 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
540 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
540 branch = attr.ib(default=b'default')
541 branch = attr.ib(default=b'default')
541 bookmark = attr.ib(default=None)
542 bookmark = attr.ib(default=None)
542 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
543 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
543 lintStatus = attr.ib(default=b'none') # camelcase-required
544 lintStatus = attr.ib(default=b'none') # camelcase-required
544 unitStatus = attr.ib(default=b'none') # camelcase-required
545 unitStatus = attr.ib(default=b'none') # camelcase-required
545 changes = attr.ib(default=attr.Factory(dict))
546 changes = attr.ib(default=attr.Factory(dict))
546 repositoryPHID = attr.ib(default=None) # camelcase-required
547 repositoryPHID = attr.ib(default=None) # camelcase-required
547
548
548 def addchange(self, change):
549 def addchange(self, change):
549 if not isinstance(change, phabchange):
550 if not isinstance(change, phabchange):
550 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
551 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
551 self.changes[change.currentPath] = change
552 self.changes[change.currentPath] = change
552
553
553
554
554 def maketext(pchange, ctx, fname):
555 def maketext(pchange, ctx, fname):
555 """populate the phabchange for a text file"""
556 """populate the phabchange for a text file"""
556 repo = ctx.repo()
557 repo = ctx.repo()
557 fmatcher = match.exact([fname])
558 fmatcher = match.exact([fname])
558 diffopts = mdiff.diffopts(git=True, context=32767)
559 diffopts = mdiff.diffopts(git=True, context=32767)
559 _pfctx, _fctx, header, fhunks = next(
560 _pfctx, _fctx, header, fhunks = next(
560 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
561 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
561 )
562 )
562
563
563 for fhunk in fhunks:
564 for fhunk in fhunks:
564 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
565 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
565 corpus = b''.join(lines[1:])
566 corpus = b''.join(lines[1:])
566 shunk = list(header)
567 shunk = list(header)
567 shunk.extend(lines)
568 shunk.extend(lines)
568 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
569 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
569 patch.diffstatdata(util.iterlines(shunk))
570 patch.diffstatdata(util.iterlines(shunk))
570 )
571 )
571 pchange.addhunk(
572 pchange.addhunk(
572 phabhunk(
573 phabhunk(
573 oldOffset,
574 oldOffset,
574 oldLength,
575 oldLength,
575 newOffset,
576 newOffset,
576 newLength,
577 newLength,
577 corpus,
578 corpus,
578 addLines,
579 addLines,
579 delLines,
580 delLines,
580 )
581 )
581 )
582 )
582
583
583
584
584 def uploadchunks(fctx, fphid):
585 def uploadchunks(fctx, fphid):
585 """upload large binary files as separate chunks.
586 """upload large binary files as separate chunks.
586 Phab requests chunking over 8MiB, and splits into 4MiB chunks
587 Phab requests chunking over 8MiB, and splits into 4MiB chunks
587 """
588 """
588 ui = fctx.repo().ui
589 ui = fctx.repo().ui
589 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
590 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
590 progress = ui.makeprogress(
591 progress = ui.makeprogress(
591 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
592 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
592 )
593 )
593 for chunk in chunks:
594 for chunk in chunks:
594 progress.increment()
595 progress.increment()
595 if chunk[b'complete']:
596 if chunk[b'complete']:
596 continue
597 continue
597 bstart = int(chunk[b'byteStart'])
598 bstart = int(chunk[b'byteStart'])
598 bend = int(chunk[b'byteEnd'])
599 bend = int(chunk[b'byteEnd'])
599 callconduit(
600 callconduit(
600 ui,
601 ui,
601 b'file.uploadchunk',
602 b'file.uploadchunk',
602 {
603 {
603 b'filePHID': fphid,
604 b'filePHID': fphid,
604 b'byteStart': bstart,
605 b'byteStart': bstart,
605 b'data': base64.b64encode(fctx.data()[bstart:bend]),
606 b'data': base64.b64encode(fctx.data()[bstart:bend]),
606 b'dataEncoding': b'base64',
607 b'dataEncoding': b'base64',
607 },
608 },
608 )
609 )
609 progress.complete()
610 progress.complete()
610
611
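(Rough worked example of the policy described in the docstring above: a 10 MiB file is over the 8 MiB threshold, so ``file.allocate`` asks for a chunked upload, ``file.querychunks`` would return three byte ranges of roughly 4 MiB + 4 MiB + 2 MiB, and each incomplete range is then sent base64-encoded through ``file.uploadchunk``.)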
611
612
612 def uploadfile(fctx):
613 def uploadfile(fctx):
613 """upload binary files to Phabricator"""
614 """upload binary files to Phabricator"""
614 repo = fctx.repo()
615 repo = fctx.repo()
615 ui = repo.ui
616 ui = repo.ui
616 fname = fctx.path()
617 fname = fctx.path()
617 size = fctx.size()
618 size = fctx.size()
618 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
619 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
619
620
620 # an allocate call is required first to see if an upload is even required
621 # an allocate call is required first to see if an upload is even required
621 # (Phab might already have it) and to determine if chunking is needed
622 # (Phab might already have it) and to determine if chunking is needed
622 allocateparams = {
623 allocateparams = {
623 b'name': fname,
624 b'name': fname,
624 b'contentLength': size,
625 b'contentLength': size,
625 b'contentHash': fhash,
626 b'contentHash': fhash,
626 }
627 }
627 filealloc = callconduit(ui, b'file.allocate', allocateparams)
628 filealloc = callconduit(ui, b'file.allocate', allocateparams)
628 fphid = filealloc[b'filePHID']
629 fphid = filealloc[b'filePHID']
629
630
630 if filealloc[b'upload']:
631 if filealloc[b'upload']:
631 ui.write(_(b'uploading %s\n') % bytes(fctx))
632 ui.write(_(b'uploading %s\n') % bytes(fctx))
632 if not fphid:
633 if not fphid:
633 uploadparams = {
634 uploadparams = {
634 b'name': fname,
635 b'name': fname,
635 b'data_base64': base64.b64encode(fctx.data()),
636 b'data_base64': base64.b64encode(fctx.data()),
636 }
637 }
637 fphid = callconduit(ui, b'file.upload', uploadparams)
638 fphid = callconduit(ui, b'file.upload', uploadparams)
638 else:
639 else:
639 uploadchunks(fctx, fphid)
640 uploadchunks(fctx, fphid)
640 else:
641 else:
641 ui.debug(b'server already has %s\n' % bytes(fctx))
642 ui.debug(b'server already has %s\n' % bytes(fctx))
642
643
643 if not fphid:
644 if not fphid:
644 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
645 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
645
646
646 return fphid
647 return fphid
647
648
648
649
650 def addoldbinary(pchange, fctx, originalfname):
651 """add the metadata for the previous version of a binary file to the
652 phabchange for the new version
653 """
654 oldfctx = fctx.p1()[originalfname]
655 if fctx.cmp(oldfctx):
656 # Files differ, add the old one
657 pchange.metadata[b'old:file:size'] = oldfctx.size()
658 mimeguess, _enc = mimetypes.guess_type(
659 encoding.unifromlocal(oldfctx.path())
660 )
661 if mimeguess:
662 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
663 mimeguess
664 )
665 fphid = uploadfile(oldfctx)
666 pchange.metadata[b'old:binary-phid'] = fphid
667 else:
668 # If it's left as IMAGE/BINARY web UI might try to display it
669 pchange.fileType = DiffFileType.TEXT
670 pchange.copynewmetadatatoold()
671
672
673 def makebinary(pchange, fctx):
674 """populate the phabchange for a binary file"""
675 pchange.fileType = DiffFileType.BINARY
676 fphid = uploadfile(fctx)
677 pchange.metadata[b'new:binary-phid'] = fphid
678 pchange.metadata[b'new:file:size'] = fctx.size()
679 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
680 if mimeguess:
681 mimeguess = pycompat.bytestr(mimeguess)
682 pchange.metadata[b'new:file:mime-type'] = mimeguess
683 if mimeguess.startswith(b'image/'):
684 pchange.fileType = DiffFileType.IMAGE
685
686
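(A minimal sketch of how these two new helpers might be wired together by a caller; illustrative only, the calling code is not part of this patch and ``pchange``, ``fctx``, ``ctx`` and ``fname`` are assumed to exist in that scope::

    pchange = phabchange(currentPath=fname, oldPath=fname)
    makebinary(pchange, fctx)        # upload new content, set fileType
    if fname in ctx.p1():
        # the file existed before this commit, so attach metadata
        # (and, if it differs, an upload) for its old version
        addoldbinary(pchange, fctx, fname)

)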
649 def creatediff(ctx):
687 def creatediff(ctx):
650 """create a Differential Diff"""
688 """create a Differential Diff"""
651 repo = ctx.repo()
689 repo = ctx.repo()
652 repophid = getrepophid(repo)
690 repophid = getrepophid(repo)
653 # Create a "Differential Diff" via "differential.createrawdiff" API
691 # Create a "Differential Diff" via "differential.createrawdiff" API
654 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
692 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
655 if repophid:
693 if repophid:
656 params[b'repositoryPHID'] = repophid
694 params[b'repositoryPHID'] = repophid
657 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
695 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
658 if not diff:
696 if not diff:
659 raise error.Abort(_(b'cannot create diff for %s') % ctx)
697 raise error.Abort(_(b'cannot create diff for %s') % ctx)
660 return diff
698 return diff
661
699
662
700
663 def writediffproperties(ctx, diff):
701 def writediffproperties(ctx, diff):
664 """write metadata to diff so patches could be applied losslessly"""
702 """write metadata to diff so patches could be applied losslessly"""
665 params = {
703 params = {
666 b'diff_id': diff[b'id'],
704 b'diff_id': diff[b'id'],
667 b'name': b'hg:meta',
705 b'name': b'hg:meta',
668 b'data': templatefilters.json(
706 b'data': templatefilters.json(
669 {
707 {
670 b'user': ctx.user(),
708 b'user': ctx.user(),
671 b'date': b'%d %d' % ctx.date(),
709 b'date': b'%d %d' % ctx.date(),
672 b'branch': ctx.branch(),
710 b'branch': ctx.branch(),
673 b'node': ctx.hex(),
711 b'node': ctx.hex(),
674 b'parent': ctx.p1().hex(),
712 b'parent': ctx.p1().hex(),
675 }
713 }
676 ),
714 ),
677 }
715 }
678 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
716 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
679
717
680 params = {
718 params = {
681 b'diff_id': diff[b'id'],
719 b'diff_id': diff[b'id'],
682 b'name': b'local:commits',
720 b'name': b'local:commits',
683 b'data': templatefilters.json(
721 b'data': templatefilters.json(
684 {
722 {
685 ctx.hex(): {
723 ctx.hex(): {
686 b'author': stringutil.person(ctx.user()),
724 b'author': stringutil.person(ctx.user()),
687 b'authorEmail': stringutil.email(ctx.user()),
725 b'authorEmail': stringutil.email(ctx.user()),
688 b'time': int(ctx.date()[0]),
726 b'time': int(ctx.date()[0]),
689 b'commit': ctx.hex(),
727 b'commit': ctx.hex(),
690 b'parents': [ctx.p1().hex()],
728 b'parents': [ctx.p1().hex()],
691 b'branch': ctx.branch(),
729 b'branch': ctx.branch(),
692 },
730 },
693 }
731 }
694 ),
732 ),
695 }
733 }
696 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
734 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
697
735
698
736
699 def createdifferentialrevision(
737 def createdifferentialrevision(
700 ctx,
738 ctx,
701 revid=None,
739 revid=None,
702 parentrevphid=None,
740 parentrevphid=None,
703 oldnode=None,
741 oldnode=None,
704 olddiff=None,
742 olddiff=None,
705 actions=None,
743 actions=None,
706 comment=None,
744 comment=None,
707 ):
745 ):
708 """create or update a Differential Revision
746 """create or update a Differential Revision
709
747
710 If revid is None, create a new Differential Revision, otherwise update
748 If revid is None, create a new Differential Revision, otherwise update
711 revid. If parentrevphid is not None, set it as a dependency.
749 revid. If parentrevphid is not None, set it as a dependency.
712
750
713 If oldnode is not None, check if the patch content (without commit message
751 If oldnode is not None, check if the patch content (without commit message
714 and metadata) has changed before creating another diff.
752 and metadata) has changed before creating another diff.
715
753
716 If actions is not None, they will be appended to the transaction.
754 If actions is not None, they will be appended to the transaction.
717 """
755 """
718 repo = ctx.repo()
756 repo = ctx.repo()
719 if oldnode:
757 if oldnode:
720 diffopts = mdiff.diffopts(git=True, context=32767)
758 diffopts = mdiff.diffopts(git=True, context=32767)
721 oldctx = repo.unfiltered()[oldnode]
759 oldctx = repo.unfiltered()[oldnode]
722 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
760 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
723 else:
761 else:
724 neednewdiff = True
762 neednewdiff = True
725
763
726 transactions = []
764 transactions = []
727 if neednewdiff:
765 if neednewdiff:
728 diff = creatediff(ctx)
766 diff = creatediff(ctx)
729 transactions.append({b'type': b'update', b'value': diff[b'phid']})
767 transactions.append({b'type': b'update', b'value': diff[b'phid']})
730 if comment:
768 if comment:
731 transactions.append({b'type': b'comment', b'value': comment})
769 transactions.append({b'type': b'comment', b'value': comment})
732 else:
770 else:
733 # Even if we don't need to upload a new diff because the patch content
771 # Even if we don't need to upload a new diff because the patch content
734 # does not change. We might still need to update its metadata so
772 # does not change. We might still need to update its metadata so
735 # pushers could know the correct node metadata.
773 # pushers could know the correct node metadata.
736 assert olddiff
774 assert olddiff
737 diff = olddiff
775 diff = olddiff
738 writediffproperties(ctx, diff)
776 writediffproperties(ctx, diff)
739
777
740 # Set the parent Revision every time, so commit re-ordering is picked-up
778 # Set the parent Revision every time, so commit re-ordering is picked-up
741 if parentrevphid:
779 if parentrevphid:
742 transactions.append(
780 transactions.append(
743 {b'type': b'parents.set', b'value': [parentrevphid]}
781 {b'type': b'parents.set', b'value': [parentrevphid]}
744 )
782 )
745
783
746 if actions:
784 if actions:
747 transactions += actions
785 transactions += actions
748
786
749 # Parse commit message and update related fields.
787 # Parse commit message and update related fields.
750 desc = ctx.description()
788 desc = ctx.description()
751 info = callconduit(
789 info = callconduit(
752 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
790 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
753 )
791 )
754 for k, v in info[b'fields'].items():
792 for k, v in info[b'fields'].items():
755 if k in [b'title', b'summary', b'testPlan']:
793 if k in [b'title', b'summary', b'testPlan']:
756 transactions.append({b'type': k, b'value': v})
794 transactions.append({b'type': k, b'value': v})
757
795
758 params = {b'transactions': transactions}
796 params = {b'transactions': transactions}
759 if revid is not None:
797 if revid is not None:
760 # Update an existing Differential Revision
798 # Update an existing Differential Revision
761 params[b'objectIdentifier'] = revid
799 params[b'objectIdentifier'] = revid
762
800
763 revision = callconduit(repo.ui, b'differential.revision.edit', params)
801 revision = callconduit(repo.ui, b'differential.revision.edit', params)
764 if not revision:
802 if not revision:
765 raise error.Abort(_(b'cannot create revision for %s') % ctx)
803 raise error.Abort(_(b'cannot create revision for %s') % ctx)
766
804
767 return revision, diff
805 return revision, diff
768
806
769
807
770 def userphids(repo, names):
808 def userphids(repo, names):
771 """convert user names to PHIDs"""
809 """convert user names to PHIDs"""
772 names = [name.lower() for name in names]
810 names = [name.lower() for name in names]
773 query = {b'constraints': {b'usernames': names}}
811 query = {b'constraints': {b'usernames': names}}
774 result = callconduit(repo.ui, b'user.search', query)
812 result = callconduit(repo.ui, b'user.search', query)
775 # username not found is not an error of the API. So check if we have missed
813 # username not found is not an error of the API. So check if we have missed
776 # some names here.
814 # some names here.
777 data = result[b'data']
815 data = result[b'data']
778 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
816 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
779 unresolved = set(names) - resolved
817 unresolved = set(names) - resolved
780 if unresolved:
818 if unresolved:
781 raise error.Abort(
819 raise error.Abort(
782 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
820 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
783 )
821 )
784 return [entry[b'phid'] for entry in data]
822 return [entry[b'phid'] for entry in data]
785
823
786
824
787 @vcrcommand(
825 @vcrcommand(
788 b'phabsend',
826 b'phabsend',
789 [
827 [
790 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
828 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
791 (b'', b'amend', True, _(b'update commit messages')),
829 (b'', b'amend', True, _(b'update commit messages')),
792 (b'', b'reviewer', [], _(b'specify reviewers')),
830 (b'', b'reviewer', [], _(b'specify reviewers')),
793 (b'', b'blocker', [], _(b'specify blocking reviewers')),
831 (b'', b'blocker', [], _(b'specify blocking reviewers')),
794 (
832 (
795 b'm',
833 b'm',
796 b'comment',
834 b'comment',
797 b'',
835 b'',
798 _(b'add a comment to Revisions with new/updated Diffs'),
836 _(b'add a comment to Revisions with new/updated Diffs'),
799 ),
837 ),
800 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
838 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
801 ],
839 ],
802 _(b'REV [OPTIONS]'),
840 _(b'REV [OPTIONS]'),
803 helpcategory=command.CATEGORY_IMPORT_EXPORT,
841 helpcategory=command.CATEGORY_IMPORT_EXPORT,
804 )
842 )
805 def phabsend(ui, repo, *revs, **opts):
843 def phabsend(ui, repo, *revs, **opts):
806 """upload changesets to Phabricator
844 """upload changesets to Phabricator
807
845
808 If there are multiple revisions specified, they will be send as a stack
846 If there are multiple revisions specified, they will be send as a stack
809 with a linear dependencies relationship using the order specified by the
847 with a linear dependencies relationship using the order specified by the
810 revset.
848 revset.
811
849
812 For the first time uploading changesets, local tags will be created to
850 For the first time uploading changesets, local tags will be created to
813 maintain the association. After the first time, phabsend will check
851 maintain the association. After the first time, phabsend will check
814 obsstore and tags information so it can figure out whether to update an
852 obsstore and tags information so it can figure out whether to update an
815 existing Differential Revision, or create a new one.
853 existing Differential Revision, or create a new one.
816
854
817 If --amend is set, update commit messages so they have the
855 If --amend is set, update commit messages so they have the
818 ``Differential Revision`` URL, remove related tags. This is similar to what
856 ``Differential Revision`` URL, remove related tags. This is similar to what
819 arcanist will do, and is more desired in author-push workflows. Otherwise,
857 arcanist will do, and is more desired in author-push workflows. Otherwise,
820 use local tags to record the ``Differential Revision`` association.
858 use local tags to record the ``Differential Revision`` association.
821
859
822 The --confirm option lets you confirm changesets before sending them. You
860 The --confirm option lets you confirm changesets before sending them. You
823 can also add following to your configuration file to make it default
861 can also add following to your configuration file to make it default
824 behaviour::
862 behaviour::
825
863
826 [phabsend]
864 [phabsend]
827 confirm = true
865 confirm = true
828
866
829 phabsend will check obsstore and the above association to decide whether to
867 phabsend will check obsstore and the above association to decide whether to
830 update an existing Differential Revision, or create a new one.
868 update an existing Differential Revision, or create a new one.
831 """
869 """
832 opts = pycompat.byteskwargs(opts)
870 opts = pycompat.byteskwargs(opts)
833 revs = list(revs) + opts.get(b'rev', [])
871 revs = list(revs) + opts.get(b'rev', [])
834 revs = scmutil.revrange(repo, revs)
872 revs = scmutil.revrange(repo, revs)
835
873
836 if not revs:
874 if not revs:
837 raise error.Abort(_(b'phabsend requires at least one changeset'))
875 raise error.Abort(_(b'phabsend requires at least one changeset'))
838 if opts.get(b'amend'):
876 if opts.get(b'amend'):
839 cmdutil.checkunfinished(repo)
877 cmdutil.checkunfinished(repo)
840
878
841 # {newnode: (oldnode, olddiff, olddrev}
879 # {newnode: (oldnode, olddiff, olddrev}
842 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
880 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
843
881
844 confirm = ui.configbool(b'phabsend', b'confirm')
882 confirm = ui.configbool(b'phabsend', b'confirm')
845 confirm |= bool(opts.get(b'confirm'))
883 confirm |= bool(opts.get(b'confirm'))
846 if confirm:
884 if confirm:
847 confirmed = _confirmbeforesend(repo, revs, oldmap)
885 confirmed = _confirmbeforesend(repo, revs, oldmap)
848 if not confirmed:
886 if not confirmed:
849 raise error.Abort(_(b'phabsend cancelled'))
887 raise error.Abort(_(b'phabsend cancelled'))
850
888
851 actions = []
889 actions = []
852 reviewers = opts.get(b'reviewer', [])
890 reviewers = opts.get(b'reviewer', [])
853 blockers = opts.get(b'blocker', [])
891 blockers = opts.get(b'blocker', [])
854 phids = []
892 phids = []
855 if reviewers:
893 if reviewers:
856 phids.extend(userphids(repo, reviewers))
894 phids.extend(userphids(repo, reviewers))
857 if blockers:
895 if blockers:
858 phids.extend(
896 phids.extend(
859 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
897 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
860 )
898 )
861 if phids:
899 if phids:
862 actions.append({b'type': b'reviewers.add', b'value': phids})
900 actions.append({b'type': b'reviewers.add', b'value': phids})
863
901
864 drevids = [] # [int]
902 drevids = [] # [int]
865 diffmap = {} # {newnode: diff}
903 diffmap = {} # {newnode: diff}
866
904
867 # Send patches one by one so we know their Differential Revision PHIDs and
905 # Send patches one by one so we know their Differential Revision PHIDs and
868 # can provide dependency relationship
906 # can provide dependency relationship
869 lastrevphid = None
907 lastrevphid = None
870 for rev in revs:
908 for rev in revs:
871 ui.debug(b'sending rev %d\n' % rev)
909 ui.debug(b'sending rev %d\n' % rev)
872 ctx = repo[rev]
910 ctx = repo[rev]
873
911
874 # Get Differential Revision ID
912 # Get Differential Revision ID
875 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
913 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
876 if oldnode != ctx.node() or opts.get(b'amend'):
914 if oldnode != ctx.node() or opts.get(b'amend'):
877 # Create or update Differential Revision
915 # Create or update Differential Revision
878 revision, diff = createdifferentialrevision(
916 revision, diff = createdifferentialrevision(
879 ctx,
917 ctx,
880 revid,
918 revid,
881 lastrevphid,
919 lastrevphid,
882 oldnode,
920 oldnode,
883 olddiff,
921 olddiff,
884 actions,
922 actions,
885 opts.get(b'comment'),
923 opts.get(b'comment'),
886 )
924 )
887 diffmap[ctx.node()] = diff
925 diffmap[ctx.node()] = diff
888 newrevid = int(revision[b'object'][b'id'])
926 newrevid = int(revision[b'object'][b'id'])
889 newrevphid = revision[b'object'][b'phid']
927 newrevphid = revision[b'object'][b'phid']
890 if revid:
928 if revid:
891 action = b'updated'
929 action = b'updated'
892 else:
930 else:
893 action = b'created'
931 action = b'created'
894
932
895 # Create a local tag to note the association, if commit message
933 # Create a local tag to note the association, if commit message
896 # does not have it already
934 # does not have it already
897 m = _differentialrevisiondescre.search(ctx.description())
935 m = _differentialrevisiondescre.search(ctx.description())
898 if not m or int(m.group(r'id')) != newrevid:
936 if not m or int(m.group(r'id')) != newrevid:
899 tagname = b'D%d' % newrevid
937 tagname = b'D%d' % newrevid
900 tags.tag(
938 tags.tag(
901 repo,
939 repo,
902 tagname,
940 tagname,
903 ctx.node(),
941 ctx.node(),
904 message=None,
942 message=None,
905 user=None,
943 user=None,
906 date=None,
944 date=None,
907 local=True,
945 local=True,
908 )
946 )
909 else:
947 else:
910 # Nothing changed. But still set "newrevphid" so the next revision
948 # Nothing changed. But still set "newrevphid" so the next revision
911 # could depend on this one and "newrevid" for the summary line.
949 # could depend on this one and "newrevid" for the summary line.
912 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
950 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
913 newrevid = revid
951 newrevid = revid
914 action = b'skipped'
952 action = b'skipped'
915
953
916 actiondesc = ui.label(
954 actiondesc = ui.label(
917 {
955 {
918 b'created': _(b'created'),
956 b'created': _(b'created'),
919 b'skipped': _(b'skipped'),
957 b'skipped': _(b'skipped'),
920 b'updated': _(b'updated'),
958 b'updated': _(b'updated'),
921 }[action],
959 }[action],
922 b'phabricator.action.%s' % action,
960 b'phabricator.action.%s' % action,
923 )
961 )
924 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
962 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
925 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
963 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
926 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
964 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
927 ui.write(
965 ui.write(
928 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
966 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
929 )
967 )
930 drevids.append(newrevid)
968 drevids.append(newrevid)
931 lastrevphid = newrevphid
969 lastrevphid = newrevphid
932
970
933 # Update commit messages and remove tags
971 # Update commit messages and remove tags
934 if opts.get(b'amend'):
972 if opts.get(b'amend'):
935 unfi = repo.unfiltered()
973 unfi = repo.unfiltered()
936 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
974 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
937 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
975 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
938 wnode = unfi[b'.'].node()
976 wnode = unfi[b'.'].node()
939 mapping = {} # {oldnode: [newnode]}
977 mapping = {} # {oldnode: [newnode]}
940 for i, rev in enumerate(revs):
978 for i, rev in enumerate(revs):
941 old = unfi[rev]
979 old = unfi[rev]
942 drevid = drevids[i]
980 drevid = drevids[i]
943 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
981 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
944 newdesc = getdescfromdrev(drev)
982 newdesc = getdescfromdrev(drev)
945 # Make sure the commit message contains "Differential Revision"
983 # Make sure the commit message contains "Differential Revision"
946 if old.description() != newdesc:
984 if old.description() != newdesc:
947 if old.phase() == phases.public:
985 if old.phase() == phases.public:
948 ui.warn(
986 ui.warn(
949 _(b"warning: not updating public commit %s\n")
987 _(b"warning: not updating public commit %s\n")
950 % scmutil.formatchangeid(old)
988 % scmutil.formatchangeid(old)
951 )
989 )
952 continue
990 continue
953 parents = [
991 parents = [
954 mapping.get(old.p1().node(), (old.p1(),))[0],
992 mapping.get(old.p1().node(), (old.p1(),))[0],
955 mapping.get(old.p2().node(), (old.p2(),))[0],
993 mapping.get(old.p2().node(), (old.p2(),))[0],
956 ]
994 ]
957 new = context.metadataonlyctx(
995 new = context.metadataonlyctx(
958 repo,
996 repo,
959 old,
997 old,
960 parents=parents,
998 parents=parents,
961 text=newdesc,
999 text=newdesc,
962 user=old.user(),
1000 user=old.user(),
963 date=old.date(),
1001 date=old.date(),
964 extra=old.extra(),
1002 extra=old.extra(),
965 )
1003 )
966
1004
967 newnode = new.commit()
1005 newnode = new.commit()
968
1006
969 mapping[old.node()] = [newnode]
1007 mapping[old.node()] = [newnode]
970 # Update diff property
1008 # Update diff property
971 # If it fails, just warn and keep going; otherwise the DREV
1009 # If it fails, just warn and keep going; otherwise the DREV
972 # associations will be lost
1010 # associations will be lost
973 try:
1011 try:
974 writediffproperties(unfi[newnode], diffmap[old.node()])
1012 writediffproperties(unfi[newnode], diffmap[old.node()])
975 except util.urlerr.urlerror:
1013 except util.urlerr.urlerror:
976 ui.warnnoi18n(
1014 ui.warnnoi18n(
977 b'Failed to update metadata for D%s\n' % drevid
1015 b'Failed to update metadata for D%s\n' % drevid
978 )
1016 )
979 # Remove the local tag since it's no longer necessary
1017 # Remove the local tag since it's no longer necessary
980 tagname = b'D%d' % drevid
1018 tagname = b'D%d' % drevid
981 if tagname in repo.tags():
1019 if tagname in repo.tags():
982 tags.tag(
1020 tags.tag(
983 repo,
1021 repo,
984 tagname,
1022 tagname,
985 nullid,
1023 nullid,
986 message=None,
1024 message=None,
987 user=None,
1025 user=None,
988 date=None,
1026 date=None,
989 local=True,
1027 local=True,
990 )
1028 )
991 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1029 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
992 if wnode in mapping:
1030 if wnode in mapping:
993 unfi.setparents(mapping[wnode][0])
1031 unfi.setparents(mapping[wnode][0])
994
1032
995
1033
996 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1034 # Map from "hg:meta" keys to headers understood by "hg import". The order is
997 # consistent with "hg export" output.
1035 # consistent with "hg export" output.
998 _metanamemap = util.sortdict(
1036 _metanamemap = util.sortdict(
999 [
1037 [
1000 (b'user', b'User'),
1038 (b'user', b'User'),
1001 (b'date', b'Date'),
1039 (b'date', b'Date'),
1002 (b'branch', b'Branch'),
1040 (b'branch', b'Branch'),
1003 (b'node', b'Node ID'),
1041 (b'node', b'Node ID'),
1004 (b'parent', b'Parent '),
1042 (b'parent', b'Parent '),
1005 ]
1043 ]
1006 )
1044 )
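# Illustrative example (values abbreviated from the "hg:meta" sample shown in
# getdiffmeta() below): given
#
#   meta = {b'user': b'Foo Bar <foo@example.com>',
#           b'date': b'1499571514 25200',
#           b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#           b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}
#
# readpatch() below would emit headers in _metanamemap order (branch is
# skipped because it is absent from meta):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16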
1007
1045
1008
1046
1009 def _confirmbeforesend(repo, revs, oldmap):
1047 def _confirmbeforesend(repo, revs, oldmap):
1010 url, token = readurltoken(repo.ui)
1048 url, token = readurltoken(repo.ui)
1011 ui = repo.ui
1049 ui = repo.ui
1012 for rev in revs:
1050 for rev in revs:
1013 ctx = repo[rev]
1051 ctx = repo[rev]
1014 desc = ctx.description().splitlines()[0]
1052 desc = ctx.description().splitlines()[0]
1015 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1053 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1016 if drevid:
1054 if drevid:
1017 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
1055 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
1018 else:
1056 else:
1019 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1057 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1020
1058
1021 ui.write(
1059 ui.write(
1022 _(b'%s - %s: %s\n')
1060 _(b'%s - %s: %s\n')
1023 % (
1061 % (
1024 drevdesc,
1062 drevdesc,
1025 ui.label(bytes(ctx), b'phabricator.node'),
1063 ui.label(bytes(ctx), b'phabricator.node'),
1026 ui.label(desc, b'phabricator.desc'),
1064 ui.label(desc, b'phabricator.desc'),
1027 )
1065 )
1028 )
1066 )
1029
1067
1030 if ui.promptchoice(
1068 if ui.promptchoice(
1031 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1069 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1032 ):
1070 ):
1033 return False
1071 return False
1034
1072
1035 return True
1073 return True
1036
1074
1037
1075
1038 _knownstatusnames = {
1076 _knownstatusnames = {
1039 b'accepted',
1077 b'accepted',
1040 b'needsreview',
1078 b'needsreview',
1041 b'needsrevision',
1079 b'needsrevision',
1042 b'closed',
1080 b'closed',
1043 b'abandoned',
1081 b'abandoned',
1044 }
1082 }
1045
1083
1046
1084
1047 def _getstatusname(drev):
1085 def _getstatusname(drev):
1048 """get normalized status name from a Differential Revision"""
1086 """get normalized status name from a Differential Revision"""
1049 return drev[b'statusName'].replace(b' ', b'').lower()
1087 return drev[b'statusName'].replace(b' ', b'').lower()
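# Illustrative example: a drev whose b'statusName' is b'Needs Review'
# normalizes to b'needsreview', matching the entries in _knownstatusnames.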
1050
1088
1051
1089
1052 # Small language to specify differential revisions. Supported symbols: (),
1090 # Small language to specify differential revisions. Supported symbols: (),
1053 # :X, &, +, and -.
1091 # :X, &, +, and -.
1054
1092
1055 _elements = {
1093 _elements = {
1056 # token-type: binding-strength, primary, prefix, infix, suffix
1094 # token-type: binding-strength, primary, prefix, infix, suffix
1057 b'(': (12, None, (b'group', 1, b')'), None, None),
1095 b'(': (12, None, (b'group', 1, b')'), None, None),
1058 b':': (8, None, (b'ancestors', 8), None, None),
1096 b':': (8, None, (b'ancestors', 8), None, None),
1059 b'&': (5, None, None, (b'and_', 5), None),
1097 b'&': (5, None, None, (b'and_', 5), None),
1060 b'+': (4, None, None, (b'add', 4), None),
1098 b'+': (4, None, None, (b'add', 4), None),
1061 b'-': (4, None, None, (b'sub', 4), None),
1099 b'-': (4, None, None, (b'sub', 4), None),
1062 b')': (0, None, None, None, None),
1100 b')': (0, None, None, None, None),
1063 b'symbol': (0, b'symbol', None, None, None),
1101 b'symbol': (0, b'symbol', None, None, None),
1064 b'end': (0, None, None, None, None),
1102 b'end': (0, None, None, None, None),
1065 }
1103 }
1066
1104
1067
1105
1068 def _tokenize(text):
1106 def _tokenize(text):
1069 view = memoryview(text) # zero-copy slice
1107 view = memoryview(text) # zero-copy slice
1070 special = b'():+-& '
1108 special = b'():+-& '
1071 pos = 0
1109 pos = 0
1072 length = len(text)
1110 length = len(text)
1073 while pos < length:
1111 while pos < length:
1074 symbol = b''.join(
1112 symbol = b''.join(
1075 itertools.takewhile(
1113 itertools.takewhile(
1076 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1114 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1077 )
1115 )
1078 )
1116 )
1079 if symbol:
1117 if symbol:
1080 yield (b'symbol', symbol, pos)
1118 yield (b'symbol', symbol, pos)
1081 pos += len(symbol)
1119 pos += len(symbol)
1082 else: # special char, ignore space
1120 else: # special char, ignore space
1083 if text[pos] != b' ':
1121 if text[pos] != b' ':
1084 yield (text[pos], None, pos)
1122 yield (text[pos], None, pos)
1085 pos += 1
1123 pos += 1
1086 yield (b'end', None, pos)
1124 yield (b'end', None, pos)
1087
1125
1088
1126
1089 def _parse(text):
1127 def _parse(text):
1090 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1128 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1091 if pos != len(text):
1129 if pos != len(text):
1092 raise error.ParseError(b'invalid token', pos)
1130 raise error.ParseError(b'invalid token', pos)
1093 return tree
1131 return tree
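# Illustrative sketch of the expected shapes (not exercised anywhere in this
# module): the spec b':D6+8' tokenizes to the token types b':', b'symbol'
# (b'D6'), b'+', b'symbol' (b'8'), b'end', and _parse() returns roughly
#
#   (b'add', (b'ancestors', (b'symbol', b'D6')), (b'symbol', b'8'))
#
# i.e. "the stack ending at D6, plus D8", which walk() inside querydrev()
# below evaluates against the prefetched revisions.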
1094
1132
1095
1133
1096 def _parsedrev(symbol):
1134 def _parsedrev(symbol):
1097 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1135 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1098 if symbol.startswith(b'D') and symbol[1:].isdigit():
1136 if symbol.startswith(b'D') and symbol[1:].isdigit():
1099 return int(symbol[1:])
1137 return int(symbol[1:])
1100 if symbol.isdigit():
1138 if symbol.isdigit():
1101 return int(symbol)
1139 return int(symbol)
1102
1140
1103
1141
1104 def _prefetchdrevs(tree):
1142 def _prefetchdrevs(tree):
1105 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1143 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1106 drevs = set()
1144 drevs = set()
1107 ancestordrevs = set()
1145 ancestordrevs = set()
1108 op = tree[0]
1146 op = tree[0]
1109 if op == b'symbol':
1147 if op == b'symbol':
1110 r = _parsedrev(tree[1])
1148 r = _parsedrev(tree[1])
1111 if r:
1149 if r:
1112 drevs.add(r)
1150 drevs.add(r)
1113 elif op == b'ancestors':
1151 elif op == b'ancestors':
1114 r, a = _prefetchdrevs(tree[1])
1152 r, a = _prefetchdrevs(tree[1])
1115 drevs.update(r)
1153 drevs.update(r)
1116 ancestordrevs.update(r)
1154 ancestordrevs.update(r)
1117 ancestordrevs.update(a)
1155 ancestordrevs.update(a)
1118 else:
1156 else:
1119 for t in tree[1:]:
1157 for t in tree[1:]:
1120 r, a = _prefetchdrevs(t)
1158 r, a = _prefetchdrevs(t)
1121 drevs.update(r)
1159 drevs.update(r)
1122 ancestordrevs.update(a)
1160 ancestordrevs.update(a)
1123 return drevs, ancestordrevs
1161 return drevs, ancestordrevs
1124
1162
1125
1163
1126 def querydrev(repo, spec):
1164 def querydrev(repo, spec):
1127 """return a list of "Differential Revision" dicts
1165 """return a list of "Differential Revision" dicts
1128
1166
1129 spec is a string using a simple query language, see docstring in phabread
1167 spec is a string using a simple query language, see docstring in phabread
1130 for details.
1168 for details.
1131
1169
1132 A "Differential Revision dict" looks like:
1170 A "Differential Revision dict" looks like:
1133
1171
1134 {
1172 {
1135 "id": "2",
1173 "id": "2",
1136 "phid": "PHID-DREV-672qvysjcczopag46qty",
1174 "phid": "PHID-DREV-672qvysjcczopag46qty",
1137 "title": "example",
1175 "title": "example",
1138 "uri": "https://phab.example.com/D2",
1176 "uri": "https://phab.example.com/D2",
1139 "dateCreated": "1499181406",
1177 "dateCreated": "1499181406",
1140 "dateModified": "1499182103",
1178 "dateModified": "1499182103",
1141 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1179 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1142 "status": "0",
1180 "status": "0",
1143 "statusName": "Needs Review",
1181 "statusName": "Needs Review",
1144 "properties": [],
1182 "properties": [],
1145 "branch": null,
1183 "branch": null,
1146 "summary": "",
1184 "summary": "",
1147 "testPlan": "",
1185 "testPlan": "",
1148 "lineCount": "2",
1186 "lineCount": "2",
1149 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1187 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1150 "diffs": [
1188 "diffs": [
1151 "3",
1189 "3",
1152 "4",
1190 "4",
1153 ],
1191 ],
1154 "commits": [],
1192 "commits": [],
1155 "reviewers": [],
1193 "reviewers": [],
1156 "ccs": [],
1194 "ccs": [],
1157 "hashes": [],
1195 "hashes": [],
1158 "auxiliary": {
1196 "auxiliary": {
1159 "phabricator:projects": [],
1197 "phabricator:projects": [],
1160 "phabricator:depends-on": [
1198 "phabricator:depends-on": [
1161 "PHID-DREV-gbapp366kutjebt7agcd"
1199 "PHID-DREV-gbapp366kutjebt7agcd"
1162 ]
1200 ]
1163 },
1201 },
1164 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1202 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1165 "sourcePath": null
1203 "sourcePath": null
1166 }
1204 }
1167 """
1205 """
1168
1206
1169 def fetch(params):
1207 def fetch(params):
1170 """params -> single drev or None"""
1208 """params -> single drev or None"""
1171 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1209 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1172 if key in prefetched:
1210 if key in prefetched:
1173 return prefetched[key]
1211 return prefetched[key]
1174 drevs = callconduit(repo.ui, b'differential.query', params)
1212 drevs = callconduit(repo.ui, b'differential.query', params)
1175 # Fill prefetched with the result
1213 # Fill prefetched with the result
1176 for drev in drevs:
1214 for drev in drevs:
1177 prefetched[drev[b'phid']] = drev
1215 prefetched[drev[b'phid']] = drev
1178 prefetched[int(drev[b'id'])] = drev
1216 prefetched[int(drev[b'id'])] = drev
1179 if key not in prefetched:
1217 if key not in prefetched:
1180 raise error.Abort(
1218 raise error.Abort(
1181 _(b'cannot get Differential Revision %r') % params
1219 _(b'cannot get Differential Revision %r') % params
1182 )
1220 )
1183 return prefetched[key]
1221 return prefetched[key]
1184
1222
1185 def getstack(topdrevids):
1223 def getstack(topdrevids):
1186 """given a top, get a stack from the bottom, [id] -> [id]"""
1224 """given a top, get a stack from the bottom, [id] -> [id]"""
1187 visited = set()
1225 visited = set()
1188 result = []
1226 result = []
1189 queue = [{b'ids': [i]} for i in topdrevids]
1227 queue = [{b'ids': [i]} for i in topdrevids]
1190 while queue:
1228 while queue:
1191 params = queue.pop()
1229 params = queue.pop()
1192 drev = fetch(params)
1230 drev = fetch(params)
1193 if drev[b'id'] in visited:
1231 if drev[b'id'] in visited:
1194 continue
1232 continue
1195 visited.add(drev[b'id'])
1233 visited.add(drev[b'id'])
1196 result.append(int(drev[b'id']))
1234 result.append(int(drev[b'id']))
1197 auxiliary = drev.get(b'auxiliary', {})
1235 auxiliary = drev.get(b'auxiliary', {})
1198 depends = auxiliary.get(b'phabricator:depends-on', [])
1236 depends = auxiliary.get(b'phabricator:depends-on', [])
1199 for phid in depends:
1237 for phid in depends:
1200 queue.append({b'phids': [phid]})
1238 queue.append({b'phids': [phid]})
1201 result.reverse()
1239 result.reverse()
1202 return smartset.baseset(result)
1240 return smartset.baseset(result)
1203
1241
1204 # Initialize prefetch cache
1242 # Initialize prefetch cache
1205 prefetched = {} # {id or phid: drev}
1243 prefetched = {} # {id or phid: drev}
1206
1244
1207 tree = _parse(spec)
1245 tree = _parse(spec)
1208 drevs, ancestordrevs = _prefetchdrevs(tree)
1246 drevs, ancestordrevs = _prefetchdrevs(tree)
1209
1247
1210 # developer config: phabricator.batchsize
1248 # developer config: phabricator.batchsize
1211 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1249 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1212
1250
1213 # Prefetch Differential Revisions in batch
1251 # Prefetch Differential Revisions in batch
1214 tofetch = set(drevs)
1252 tofetch = set(drevs)
1215 for r in ancestordrevs:
1253 for r in ancestordrevs:
1216 tofetch.update(range(max(1, r - batchsize), r + 1))
1254 tofetch.update(range(max(1, r - batchsize), r + 1))
1217 if drevs:
1255 if drevs:
1218 fetch({b'ids': list(tofetch)})
1256 fetch({b'ids': list(tofetch)})
1219 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1257 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1220
1258
1221 # Walk through the tree, return smartsets
1259 # Walk through the tree, return smartsets
1222 def walk(tree):
1260 def walk(tree):
1223 op = tree[0]
1261 op = tree[0]
1224 if op == b'symbol':
1262 if op == b'symbol':
1225 drev = _parsedrev(tree[1])
1263 drev = _parsedrev(tree[1])
1226 if drev:
1264 if drev:
1227 return smartset.baseset([drev])
1265 return smartset.baseset([drev])
1228 elif tree[1] in _knownstatusnames:
1266 elif tree[1] in _knownstatusnames:
1229 drevs = [
1267 drevs = [
1230 r
1268 r
1231 for r in validids
1269 for r in validids
1232 if _getstatusname(prefetched[r]) == tree[1]
1270 if _getstatusname(prefetched[r]) == tree[1]
1233 ]
1271 ]
1234 return smartset.baseset(drevs)
1272 return smartset.baseset(drevs)
1235 else:
1273 else:
1236 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1274 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1237 elif op in {b'and_', b'add', b'sub'}:
1275 elif op in {b'and_', b'add', b'sub'}:
1238 assert len(tree) == 3
1276 assert len(tree) == 3
1239 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1277 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1240 elif op == b'group':
1278 elif op == b'group':
1241 return walk(tree[1])
1279 return walk(tree[1])
1242 elif op == b'ancestors':
1280 elif op == b'ancestors':
1243 return getstack(walk(tree[1]))
1281 return getstack(walk(tree[1]))
1244 else:
1282 else:
1245 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1283 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1246
1284
1247 return [prefetched[r] for r in walk(tree)]
1285 return [prefetched[r] for r in walk(tree)]
1248
1286
1249
1287
1250 def getdescfromdrev(drev):
1288 def getdescfromdrev(drev):
1251 """get description (commit message) from "Differential Revision"
1289 """get description (commit message) from "Differential Revision"
1252
1290
1253 This is similar to differential.getcommitmessage API. But we only care
1291 This is similar to differential.getcommitmessage API. But we only care
1254 about limited fields: title, summary, test plan, and URL.
1292 about limited fields: title, summary, test plan, and URL.
1255 """
1293 """
1256 title = drev[b'title']
1294 title = drev[b'title']
1257 summary = drev[b'summary'].rstrip()
1295 summary = drev[b'summary'].rstrip()
1258 testplan = drev[b'testPlan'].rstrip()
1296 testplan = drev[b'testPlan'].rstrip()
1259 if testplan:
1297 if testplan:
1260 testplan = b'Test Plan:\n%s' % testplan
1298 testplan = b'Test Plan:\n%s' % testplan
1261 uri = b'Differential Revision: %s' % drev[b'uri']
1299 uri = b'Differential Revision: %s' % drev[b'uri']
1262 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1300 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
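# Illustrative example: a drev with title "example", an empty summary, test
# plan "ran the test suite", and uri "https://phab.example.com/D2" would be
# rendered as (the empty summary is dropped by filter(None, ...)):
#
#   example
#
#   Test Plan:
#   ran the test suite
#
#   Differential Revision: https://phab.example.com/D2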
1263
1301
1264
1302
1265 def getdiffmeta(diff):
1303 def getdiffmeta(diff):
1266 """get commit metadata (date, node, user, p1) from a diff object
1304 """get commit metadata (date, node, user, p1) from a diff object
1267
1305
1268 The metadata could be "hg:meta", sent by phabsend, like:
1306 The metadata could be "hg:meta", sent by phabsend, like:
1269
1307
1270 "properties": {
1308 "properties": {
1271 "hg:meta": {
1309 "hg:meta": {
1272 "date": "1499571514 25200",
1310 "date": "1499571514 25200",
1273 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1311 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1274 "user": "Foo Bar <foo@example.com>",
1312 "user": "Foo Bar <foo@example.com>",
1275 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1313 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1276 }
1314 }
1277 }
1315 }
1278
1316
1279 Or converted from "local:commits", sent by "arc", like:
1317 Or converted from "local:commits", sent by "arc", like:
1280
1318
1281 "properties": {
1319 "properties": {
1282 "local:commits": {
1320 "local:commits": {
1283 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1321 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1284 "author": "Foo Bar",
1322 "author": "Foo Bar",
1285 "time": 1499546314,
1323 "time": 1499546314,
1286 "branch": "default",
1324 "branch": "default",
1287 "tag": "",
1325 "tag": "",
1288 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1326 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1289 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1327 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1290 "local": "1000",
1328 "local": "1000",
1291 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1329 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1292 "summary": "...",
1330 "summary": "...",
1293 "message": "...",
1331 "message": "...",
1294 "authorEmail": "foo@example.com"
1332 "authorEmail": "foo@example.com"
1295 }
1333 }
1296 }
1334 }
1297 }
1335 }
1298
1336
1299 Note: metadata extracted from "local:commits" will lose time zone
1337 Note: metadata extracted from "local:commits" will lose time zone
1300 information.
1338 information.
1301 """
1339 """
1302 props = diff.get(b'properties') or {}
1340 props = diff.get(b'properties') or {}
1303 meta = props.get(b'hg:meta')
1341 meta = props.get(b'hg:meta')
1304 if not meta:
1342 if not meta:
1305 if props.get(b'local:commits'):
1343 if props.get(b'local:commits'):
1306 commit = sorted(props[b'local:commits'].values())[0]
1344 commit = sorted(props[b'local:commits'].values())[0]
1307 meta = {}
1345 meta = {}
1308 if b'author' in commit and b'authorEmail' in commit:
1346 if b'author' in commit and b'authorEmail' in commit:
1309 meta[b'user'] = b'%s <%s>' % (
1347 meta[b'user'] = b'%s <%s>' % (
1310 commit[b'author'],
1348 commit[b'author'],
1311 commit[b'authorEmail'],
1349 commit[b'authorEmail'],
1312 )
1350 )
1313 if b'time' in commit:
1351 if b'time' in commit:
1314 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1352 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1315 if b'branch' in commit:
1353 if b'branch' in commit:
1316 meta[b'branch'] = commit[b'branch']
1354 meta[b'branch'] = commit[b'branch']
1317 node = commit.get(b'commit', commit.get(b'rev'))
1355 node = commit.get(b'commit', commit.get(b'rev'))
1318 if node:
1356 if node:
1319 meta[b'node'] = node
1357 meta[b'node'] = node
1320 if len(commit.get(b'parents', ())) >= 1:
1358 if len(commit.get(b'parents', ())) >= 1:
1321 meta[b'parent'] = commit[b'parents'][0]
1359 meta[b'parent'] = commit[b'parents'][0]
1322 else:
1360 else:
1323 meta = {}
1361 meta = {}
1324 if b'date' not in meta and b'dateCreated' in diff:
1362 if b'date' not in meta and b'dateCreated' in diff:
1325 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1363 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1326 if b'branch' not in meta and diff.get(b'branch'):
1364 if b'branch' not in meta and diff.get(b'branch'):
1327 meta[b'branch'] = diff[b'branch']
1365 meta[b'branch'] = diff[b'branch']
1328 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1366 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1329 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1367 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1330 return meta
1368 return meta
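# Illustrative example: for the "local:commits" sample in the docstring above
# (with no "hg:meta" property present), getdiffmeta() would return roughly
#
#   {b'user': b'Foo Bar <foo@example.com>',
#    b'date': b'1499546314 0',
#    b'branch': b'default',
#    b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#    b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16'}
#
# Note the hard-coded "0" UTC offset, i.e. the time zone loss mentioned above.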
1331
1369
1332
1370
1333 def readpatch(repo, drevs, write):
1371 def readpatch(repo, drevs, write):
1334 """generate plain-text patch readable by 'hg import'
1372 """generate plain-text patch readable by 'hg import'
1335
1373
1336 write is usually ui.write. drevs is what "querydrev" returns, results of
1374 write is usually ui.write. drevs is what "querydrev" returns, results of
1337 "differential.query".
1375 "differential.query".
1338 """
1376 """
1339 # Prefetch hg:meta property for all diffs
1377 # Prefetch hg:meta property for all diffs
1340 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1378 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1341 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1379 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1342
1380
1343 # Generate patch for each drev
1381 # Generate patch for each drev
1344 for drev in drevs:
1382 for drev in drevs:
1345 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1383 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1346
1384
1347 diffid = max(int(v) for v in drev[b'diffs'])
1385 diffid = max(int(v) for v in drev[b'diffs'])
1348 body = callconduit(
1386 body = callconduit(
1349 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1387 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1350 )
1388 )
1351 desc = getdescfromdrev(drev)
1389 desc = getdescfromdrev(drev)
1352 header = b'# HG changeset patch\n'
1390 header = b'# HG changeset patch\n'
1353
1391
1354 # Try to preserve metadata from hg:meta property. Write hg patch
1392 # Try to preserve metadata from hg:meta property. Write hg patch
1355 # headers that can be read by the "import" command. See patchheadermap
1393 # headers that can be read by the "import" command. See patchheadermap
1356 # and extract in mercurial/patch.py for supported headers.
1394 # and extract in mercurial/patch.py for supported headers.
1357 meta = getdiffmeta(diffs[b'%d' % diffid])
1395 meta = getdiffmeta(diffs[b'%d' % diffid])
1358 for k in _metanamemap.keys():
1396 for k in _metanamemap.keys():
1359 if k in meta:
1397 if k in meta:
1360 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1398 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1361
1399
1362 content = b'%s%s\n%s' % (header, desc, body)
1400 content = b'%s%s\n%s' % (header, desc, body)
1363 write(content)
1401 write(content)
1364
1402
1365
1403
1366 @vcrcommand(
1404 @vcrcommand(
1367 b'phabread',
1405 b'phabread',
1368 [(b'', b'stack', False, _(b'read dependencies'))],
1406 [(b'', b'stack', False, _(b'read dependencies'))],
1369 _(b'DREVSPEC [OPTIONS]'),
1407 _(b'DREVSPEC [OPTIONS]'),
1370 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1408 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1371 )
1409 )
1372 def phabread(ui, repo, spec, **opts):
1410 def phabread(ui, repo, spec, **opts):
1373 """print patches from Phabricator suitable for importing
1411 """print patches from Phabricator suitable for importing
1374
1412
1375 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1413 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1376 the number ``123``. It could also have common operators like ``+``, ``-``,
1414 the number ``123``. It could also have common operators like ``+``, ``-``,
1377 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1415 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1378 select a stack.
1416 select a stack.
1379
1417
1380 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1418 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1381 could be used to filter patches by status. For performance reasons, they
1419 could be used to filter patches by status. For performance reasons, they
1382 only represent a subset of non-status selections and cannot be used alone.
1420 only represent a subset of non-status selections and cannot be used alone.
1383
1421
1384 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1422 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1385 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1423 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1386 stack up to D9.
1424 stack up to D9.
1387
1425
1388 If --stack is given, follow dependency information and read all patches.
1426 If --stack is given, follow dependency information and read all patches.
1389 It is equivalent to the ``:`` operator.
1427 It is equivalent to the ``:`` operator.
1390 """
1428 """
1391 opts = pycompat.byteskwargs(opts)
1429 opts = pycompat.byteskwargs(opts)
1392 if opts.get(b'stack'):
1430 if opts.get(b'stack'):
1393 spec = b':(%s)' % spec
1431 spec = b':(%s)' % spec
1394 drevs = querydrev(repo, spec)
1432 drevs = querydrev(repo, spec)
1395 readpatch(repo, drevs, ui.write)
1433 readpatch(repo, drevs, ui.write)
1396
1434
1397
1435
1398 @vcrcommand(
1436 @vcrcommand(
1399 b'phabupdate',
1437 b'phabupdate',
1400 [
1438 [
1401 (b'', b'accept', False, _(b'accept revisions')),
1439 (b'', b'accept', False, _(b'accept revisions')),
1402 (b'', b'reject', False, _(b'reject revisions')),
1440 (b'', b'reject', False, _(b'reject revisions')),
1403 (b'', b'abandon', False, _(b'abandon revisions')),
1441 (b'', b'abandon', False, _(b'abandon revisions')),
1404 (b'', b'reclaim', False, _(b'reclaim revisions')),
1442 (b'', b'reclaim', False, _(b'reclaim revisions')),
1405 (b'm', b'comment', b'', _(b'comment on the last revision')),
1443 (b'm', b'comment', b'', _(b'comment on the last revision')),
1406 ],
1444 ],
1407 _(b'DREVSPEC [OPTIONS]'),
1445 _(b'DREVSPEC [OPTIONS]'),
1408 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1446 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1409 )
1447 )
1410 def phabupdate(ui, repo, spec, **opts):
1448 def phabupdate(ui, repo, spec, **opts):
1411 """update Differential Revision in batch
1449 """update Differential Revision in batch
1412
1450
1413 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1451 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1414 """
1452 """
1415 opts = pycompat.byteskwargs(opts)
1453 opts = pycompat.byteskwargs(opts)
1416 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1454 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1417 if len(flags) > 1:
1455 if len(flags) > 1:
1418 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1456 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1419
1457
1420 actions = []
1458 actions = []
1421 for f in flags:
1459 for f in flags:
1422 actions.append({b'type': f, b'value': b'true'})
1460 actions.append({b'type': f, b'value': b'true'})
1423
1461
1424 drevs = querydrev(repo, spec)
1462 drevs = querydrev(repo, spec)
1425 for i, drev in enumerate(drevs):
1463 for i, drev in enumerate(drevs):
1426 if i + 1 == len(drevs) and opts.get(b'comment'):
1464 if i + 1 == len(drevs) and opts.get(b'comment'):
1427 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1465 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1428 if actions:
1466 if actions:
1429 params = {
1467 params = {
1430 b'objectIdentifier': drev[b'phid'],
1468 b'objectIdentifier': drev[b'phid'],
1431 b'transactions': actions,
1469 b'transactions': actions,
1432 }
1470 }
1433 callconduit(ui, b'differential.revision.edit', params)
1471 callconduit(ui, b'differential.revision.edit', params)
1434
1472
1435
1473
1436 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1474 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1437 def template_review(context, mapping):
1475 def template_review(context, mapping):
1438 """:phabreview: Object describing the review for this changeset.
1476 """:phabreview: Object describing the review for this changeset.
1439 Has attributes `url` and `id`.
1477 Has attributes `url` and `id`.
1440 """
1478 """
1441 ctx = context.resource(mapping, b'ctx')
1479 ctx = context.resource(mapping, b'ctx')
1442 m = _differentialrevisiondescre.search(ctx.description())
1480 m = _differentialrevisiondescre.search(ctx.description())
1443 if m:
1481 if m:
1444 return templateutil.hybriddict(
1482 return templateutil.hybriddict(
1445 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1483 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1446 )
1484 )
1447 else:
1485 else:
1448 tags = ctx.repo().nodetags(ctx.node())
1486 tags = ctx.repo().nodetags(ctx.node())
1449 for t in tags:
1487 for t in tags:
1450 if _differentialrevisiontagre.match(t):
1488 if _differentialrevisiontagre.match(t):
1451 url = ctx.repo().ui.config(b'phabricator', b'url')
1489 url = ctx.repo().ui.config(b'phabricator', b'url')
1452 if not url.endswith(b'/'):
1490 if not url.endswith(b'/'):
1453 url += b'/'
1491 url += b'/'
1454 url += t
1492 url += t
1455
1493
1456 return templateutil.hybriddict({b'url': url, b'id': t,})
1494 return templateutil.hybriddict({b'url': url, b'id': t,})
1457 return None
1495 return None
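# Illustrative usage (assuming a changeset that has an associated Differential
# Revision, either noted in its description or via a local Dxxx tag):
#
#   hg log -r . -T '{phabreview.id} {phabreview.url}\n'
#
# which would print something like "D123 https://phab.example.com/D123".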