##// END OF EJS Templates
phabupdate: allow revisions to be commandeered...
Matt Harbison -
r45699:383b8c77 default
parent child Browse files
Show More
@@ -1,2299 +1,2301 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Central helper object collecting command/config/template registrations.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator', b'debug', default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels used when rendering phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every vcr-capable command (see vcrcommand below).
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
166
166
167
167
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` from the working directory (if present), maps its
    ``repository.callsign`` and ``phabricator.uri`` keys onto the
    ``phabricator.callsign`` / ``phabricator.url`` hg config items, and then
    chains to the wrapped loader for ``.hg/hgrc``.
    """
    loaded = False
    params = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        params = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig: nothing to apply.
        pass

    overrides = util.sortdict()

    if b"repository.callsign" in params:
        overrides[(b"phabricator", b"callsign")] = params[b"repository.callsign"]

    if b"phabricator.uri" in params:
        overrides[(b"phabricator", b"url")] = params[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or loaded  # Load .hg/hgrc
204
204
205
205
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Works like ``@command`` but appends the hidden ``--test-vcr`` flag.  When
    that flag is supplied, HTTP(S) requests issued by the command are recorded
    into (or replayed from) the given cassette file via the ``vcr`` package.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match if URI, method and body parameters agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token so cassettes don't leak credentials.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are environment specific; drop them from recordings.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            if kwargs.get('test_vcr'):
                cassette = pycompat.fsdecode(kwargs.pop('test_vcr'))
                import hgdemandimport

                # vcr does not cooperate with demandimport; import eagerly.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
284
284
285
285
def _debug(ui, *msg, **opts):
    """write debug output for Phabricator if ``phabricator.debug`` is set

    Specifically, this avoids dumping Conduit and HTTP auth chatter that is
    printed with the --debug argument.
    """
    # Guard clause: nothing to do unless the dedicated debug knob is on.
    if not ui.configbool(b"phabricator", b"debug"):
        return
    saved = ui.debugflag
    try:
        # Temporarily force debug output on for just this write.
        ui.debugflag = True
        ui.write(*msg, **opts)
    finally:
        ui.debugflag = saved
299
299
300
300
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _walk(prefix, obj):
        # Booleans become PHP-style literal strings before flattening.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        enumerate_list = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        itemize = {list: enumerate_list, dict: lambda x: x.items()}.get(
            type(obj)
        )
        if itemize is None:
            # Leaf value: record it under the accumulated key.
            flat[prefix] = obj
        else:
            for k, v in itemize(obj):
                if prefix:
                    _walk(b'%s[%s]' % (prefix, k), v)
                else:
                    _walk(k, v)

    _walk(b'', params)
    return util.urlreq.urlencode(flat)
326
326
327
327
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)

    if res:
        groupname, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % groupname)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
356
356
357
357
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Work on a copy so the caller's dict is not polluted with the token.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Delegate the HTTP transport to a user-configured curl invocation.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default transport: the built-in url opener.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
401
401
402
402
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    rendered = json.dumps(
        result, sort_keys=True, indent=2, separators=(u',', u': ')
    )
    ui.write(b'%s\n' % encoding.unitolocal(rendered))
426
426
427
427
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    cached = repo.ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        # Without a callsign there is nothing to look up.
        return None

    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None

    repophid = query[b'data'][0][b'phid']
    # Cache the answer in config so subsequent calls skip the round trip.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
447
447
448
448
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
450 _differentialrevisiondescre = re.compile(
450 _differentialrevisiondescre = re.compile(
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
452 )
452 )
453
453
454
454
455 def getoldnodedrevmap(repo, nodelist):
455 def getoldnodedrevmap(repo, nodelist):
456 """find previous nodes that has been sent to Phabricator
456 """find previous nodes that has been sent to Phabricator
457
457
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
459 for node in nodelist with known previous sent versions, or associated
459 for node in nodelist with known previous sent versions, or associated
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
461 be ``None``.
461 be ``None``.
462
462
463 Examines commit messages like "Differential Revision:" to get the
463 Examines commit messages like "Differential Revision:" to get the
464 association information.
464 association information.
465
465
466 If such commit message line is not found, examines all precursors and their
466 If such commit message line is not found, examines all precursors and their
467 tags. Tags with format like "D1234" are considered a match and the node
467 tags. Tags with format like "D1234" are considered a match and the node
468 with that tag, and the number after "D" (ex. 1234) will be returned.
468 with that tag, and the number after "D" (ex. 1234) will be returned.
469
469
470 The ``old node``, if not None, is guaranteed to be the last diff of
470 The ``old node``, if not None, is guaranteed to be the last diff of
471 corresponding Differential Revision, and exist in the repo.
471 corresponding Differential Revision, and exist in the repo.
472 """
472 """
473 unfi = repo.unfiltered()
473 unfi = repo.unfiltered()
474 has_node = unfi.changelog.index.has_node
474 has_node = unfi.changelog.index.has_node
475
475
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
477 # ordered for test stability when printing new -> old mapping below
477 # ordered for test stability when printing new -> old mapping below
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
479 for node in nodelist:
479 for node in nodelist:
480 ctx = unfi[node]
480 ctx = unfi[node]
481 # For tags like "D123", put them into "toconfirm" to verify later
481 # For tags like "D123", put them into "toconfirm" to verify later
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
483 for n in precnodes:
483 for n in precnodes:
484 if has_node(n):
484 if has_node(n):
485 for tag in unfi.nodetags(n):
485 for tag in unfi.nodetags(n):
486 m = _differentialrevisiontagre.match(tag)
486 m = _differentialrevisiontagre.match(tag)
487 if m:
487 if m:
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
489 break
489 break
490 else:
490 else:
491 continue # move to next predecessor
491 continue # move to next predecessor
492 break # found a tag, stop
492 break # found a tag, stop
493 else:
493 else:
494 # Check commit message
494 # Check commit message
495 m = _differentialrevisiondescre.search(ctx.description())
495 m = _differentialrevisiondescre.search(ctx.description())
496 if m:
496 if m:
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
498
498
499 # Double check if tags are genuine by collecting all old nodes from
499 # Double check if tags are genuine by collecting all old nodes from
500 # Phabricator, and expect precursors overlap with it.
500 # Phabricator, and expect precursors overlap with it.
501 if toconfirm:
501 if toconfirm:
502 drevs = [drev for force, precs, drev in toconfirm.values()]
502 drevs = [drev for force, precs, drev in toconfirm.values()]
503 alldiffs = callconduit(
503 alldiffs = callconduit(
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
505 )
505 )
506
506
507 def getnodes(d, precset):
507 def getnodes(d, precset):
508 # Ignore other nodes that were combined into the Differential
508 # Ignore other nodes that were combined into the Differential
509 # that aren't predecessors of the current local node.
509 # that aren't predecessors of the current local node.
510 return [n for n in getlocalcommits(d) if n in precset]
510 return [n for n in getlocalcommits(d) if n in precset]
511
511
512 for newnode, (force, precset, drev) in toconfirm.items():
512 for newnode, (force, precset, drev) in toconfirm.items():
513 diffs = [
513 diffs = [
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
515 ]
515 ]
516
516
517 # local predecessors known by Phabricator
517 # local predecessors known by Phabricator
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
519
519
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
521 # and force is not set (when commit message says nothing)
521 # and force is not set (when commit message says nothing)
522 if not force and not phprecset:
522 if not force and not phprecset:
523 tagname = b'D%d' % drev
523 tagname = b'D%d' % drev
524 tags.tag(
524 tags.tag(
525 repo,
525 repo,
526 tagname,
526 tagname,
527 nullid,
527 nullid,
528 message=None,
528 message=None,
529 user=None,
529 user=None,
530 date=None,
530 date=None,
531 local=True,
531 local=True,
532 )
532 )
533 unfi.ui.warn(
533 unfi.ui.warn(
534 _(
534 _(
535 b'D%d: local tag removed - does not match '
535 b'D%d: local tag removed - does not match '
536 b'Differential history\n'
536 b'Differential history\n'
537 )
537 )
538 % drev
538 % drev
539 )
539 )
540 continue
540 continue
541
541
542 # Find the last node using Phabricator metadata, and make sure it
542 # Find the last node using Phabricator metadata, and make sure it
543 # exists in the repo
543 # exists in the repo
544 oldnode = lastdiff = None
544 oldnode = lastdiff = None
545 if diffs:
545 if diffs:
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
547 oldnodes = getnodes(lastdiff, precset)
547 oldnodes = getnodes(lastdiff, precset)
548
548
549 _debug(
549 _debug(
550 unfi.ui,
550 unfi.ui,
551 b"%s mapped to old nodes %s\n"
551 b"%s mapped to old nodes %s\n"
552 % (
552 % (
553 short(newnode),
553 short(newnode),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
555 ),
555 ),
556 )
556 )
557
557
558 # If this commit was the result of `hg fold` after submission,
558 # If this commit was the result of `hg fold` after submission,
559 # and now resubmitted with --fold, the easiest thing to do is
559 # and now resubmitted with --fold, the easiest thing to do is
560 # to leave the node clear. This only results in creating a new
560 # to leave the node clear. This only results in creating a new
561 # diff for the _same_ Differential Revision if this commit is
561 # diff for the _same_ Differential Revision if this commit is
562 # the first or last in the selected range. If we picked a node
562 # the first or last in the selected range. If we picked a node
563 # from the list instead, it would have to be the lowest if at
563 # from the list instead, it would have to be the lowest if at
564 # the beginning of the --fold range, or the highest at the end.
564 # the beginning of the --fold range, or the highest at the end.
565 # Otherwise, one or more of the nodes wouldn't be considered in
565 # Otherwise, one or more of the nodes wouldn't be considered in
566 # the diff, and the Differential wouldn't be properly updated.
566 # the diff, and the Differential wouldn't be properly updated.
567 # If this commit is the result of `hg split` in the same
567 # If this commit is the result of `hg split` in the same
568 # scenario, there is a single oldnode here (and multiple
568 # scenario, there is a single oldnode here (and multiple
569 # newnodes mapped to it). That makes it the same as the normal
569 # newnodes mapped to it). That makes it the same as the normal
570 # case, as the edges of the newnode range cleanly maps to one
570 # case, as the edges of the newnode range cleanly maps to one
571 # oldnode each.
571 # oldnode each.
572 if len(oldnodes) == 1:
572 if len(oldnodes) == 1:
573 oldnode = oldnodes[0]
573 oldnode = oldnodes[0]
574 if oldnode and not has_node(oldnode):
574 if oldnode and not has_node(oldnode):
575 oldnode = None
575 oldnode = None
576
576
577 result[newnode] = (oldnode, lastdiff, drev)
577 result[newnode] = (oldnode, lastdiff, drev)
578
578
579 return result
579 return result
580
580
581
581
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        drev = None
        ctx = repo[rev]
        # The commit message is authoritative; only fall back to local
        # tags when it carries no Differential Revision URL.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drev = int(descmatch.group('id'))
        else:
            # Scan local tags of the form "D<number>" attached to this node.
            for tag in repo.nodetags(ctx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drev = int(tagmatch.group(1))
                    break
        mapping[rev] = drev

    return mapping
603
603
604
604
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # The second element of each pair is a label used for terminal
    # coloring; only the raw chunk bytes matter here.
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    return b''.join(chunk for chunk, _label in chunks)
613
613
614
614
class DiffChangeType(object):
    """Per-file operation codes for a Differential change.

    These values end up in the "type" field of a phabchange and are sent
    to Phabricator verbatim, so they presumably mirror the server-side
    DifferentialChangeType constants -- do not renumber.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
624
624
625
625
class DiffFileType(object):
    """File-content classification codes for a Differential change.

    Stored in the "fileType" field of a phabchange and sent to the server
    verbatim; presumably mirrors Phabricator's file-type constants -- do
    not renumber.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
630
630
631
631
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are camelCase on purpose: the hunk is serialized with
    attr.asdict() (see phabchange.addhunk) into Conduit parameters that
    expect these exact key names.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
645
645
646
646
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names are camelCase on purpose: the change is serialized with
    attr.asdict() (see phabdiff.addchange) into Conduit parameters that
    expect these exact key names.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths when this file was moved/copied elsewhere.
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
686
686
687
687
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Attribute names are camelCase on purpose: the whole object is serialized
    with attr.asdict() into "differential.creatediff" parameters, which
    expect these exact key names.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps currentPath -> serialized phabchange dict (see addchange()).
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for one file, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
714
714
715
715
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A huge context makes each hunk effectively self-contained.
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in fhunks:
        oldstart, oldlen, newstart, newlen = ranges
        # NOTE(review): the first element of `lines` is skipped, presumably
        # the "@@ ..." hunk header -- the offsets are passed explicitly.
        corpus = b''.join(lines[1:])
        fullhunk = list(header) + list(lines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(fullhunk))
        )
        pchange.addhunk(
            phabhunk(
                oldstart,
                oldlen,
                newstart,
                newlen,
                corpus,
                added,
                deleted,
            )
        )
744
744
745
745
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server already holds.
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
771
771
772
772
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': fctx.size(),
            b'contentHash': pycompat.bytestr(
                hashlib.sha256(fctx.data()).hexdigest()
            ),
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated up front: the file is large enough that
            # the server wants it in chunks.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
808
808
809
809
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged: mirror the new side's metadata and force
        # TEXT so the web UI doesn't try to render it as IMAGE/BINARY.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
833
833
834
834
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Let the web UI render images inline.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
847
847
848
848
# Copied from mercurial/patch.py
# Maps a file context's flags (b'l' symlink, b'x' executable, b'' regular
# file) to the corresponding git-style octal mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
851
851
852
852
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user why this file will show up as binary on Phabricator.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
866
866
867
867
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    oldctx = basectx.p1()
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = oldctx[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary or non-UTF-8 content cannot be sent as a text hunk.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
880
880
881
881
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Either side being binary (or undecodable) forces the binary path.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
906
906
907
907
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: ``removed`` is mutated in place -- move sources are deleted from
    it so that a later addremoved() call does not also record them as
    plain deletions.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where this file came from: prefer the precomputed
        # pathcopies() mapping (multi-commit --fold case), otherwise fall
        # back to the file context's own rename metadata.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappeared, so this is a move: record a
                # MOVE_AWAY change for the source and take it out of
                # `removed` (see docstring).
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-recorded move makes
                # the source a MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Either side being binary (or undecodable) forces the binary path.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Flush the synthesized source-side changes after all destinations are
    # known (MULTICOPY upgrades above may still mutate them during the loop).
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
989
989
990
990
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    # The conduit call yielded nothing usable; report which range failed.
    if basectx != ctx:
        raise error.Abort(_(b'cannot create diff for %s::%s') % (basectx, ctx))
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
1020
1020
1021
1021
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" records enough about the tip commit to recreate changeset
    # metadata on import.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" maps every participating commit to its local identity.
    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1065
1065
1066
1066
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.
    """
    basectx, ctx = ctxs[0], ctxs[-1]
    repo = ctx.repo()

    if oldnode:
        # Compare full-context diffs of old and new ranges to decide whether
        # the patch content actually changed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            unfi[oldbasenode], unfi[oldnode], diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for index, fctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        info = callconduit(
            repo.ui,
            b'differential.parsecommitmessage',
            {b'corpus': fctx.description()},
        )

        for field in [b'title', b'summary', b'testPlan']:
            value = info[b'fields'].get(field)
            if not value:
                continue
            value = value.rstrip()

            if index == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[field] = value
            else:
                if field == b'title':
                    # Add subsequent titles (i.e. the first line of the commit
                    # message) back to the summary.
                    field = b'summary'
                # Append any current field to the existing composite field
                fields[field] = b'\n\n'.join(
                    filter(None, [fields.get(field), value])
                )

    for field, value in fields.items():
        transactions.append({b'type': field, b'value': value})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1185
1185
1186
1186
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(lowered) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1202
1202
1203
1203
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    actionlabels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(
        actionlabels[action], b'phabricator.action.%s' % action
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    # first line of the commit message only
    summary = ctx.description().split(b'\n')[0]
    desc = ui.label(summary, b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1222
1222
1223
1223
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = stringutil.pprint([short(n) for n in newnodes])
    _debug(unfi.ui, b"new commits: %s\n" % shortnodes)

    try:
        writediffproperties([unfi[n] for n in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1241
1241
1242
1242
1243 @vcrcommand(
1243 @vcrcommand(
1244 b'phabsend',
1244 b'phabsend',
1245 [
1245 [
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1250 (
1250 (
1251 b'm',
1251 b'm',
1252 b'comment',
1252 b'comment',
1253 b'',
1253 b'',
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1255 ),
1255 ),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1258 ],
1258 ],
1259 _(b'REV [OPTIONS]'),
1259 _(b'REV [OPTIONS]'),
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1261 )
1261 )
1262 def phabsend(ui, repo, *revs, **opts):
1262 def phabsend(ui, repo, *revs, **opts):
1263 """upload changesets to Phabricator
1263 """upload changesets to Phabricator
1264
1264
1265 If there are multiple revisions specified, they will be send as a stack
1265 If there are multiple revisions specified, they will be send as a stack
1266 with a linear dependencies relationship using the order specified by the
1266 with a linear dependencies relationship using the order specified by the
1267 revset.
1267 revset.
1268
1268
1269 For the first time uploading changesets, local tags will be created to
1269 For the first time uploading changesets, local tags will be created to
1270 maintain the association. After the first time, phabsend will check
1270 maintain the association. After the first time, phabsend will check
1271 obsstore and tags information so it can figure out whether to update an
1271 obsstore and tags information so it can figure out whether to update an
1272 existing Differential Revision, or create a new one.
1272 existing Differential Revision, or create a new one.
1273
1273
1274 If --amend is set, update commit messages so they have the
1274 If --amend is set, update commit messages so they have the
1275 ``Differential Revision`` URL, remove related tags. This is similar to what
1275 ``Differential Revision`` URL, remove related tags. This is similar to what
1276 arcanist will do, and is more desired in author-push workflows. Otherwise,
1276 arcanist will do, and is more desired in author-push workflows. Otherwise,
1277 use local tags to record the ``Differential Revision`` association.
1277 use local tags to record the ``Differential Revision`` association.
1278
1278
1279 The --confirm option lets you confirm changesets before sending them. You
1279 The --confirm option lets you confirm changesets before sending them. You
1280 can also add following to your configuration file to make it default
1280 can also add following to your configuration file to make it default
1281 behaviour::
1281 behaviour::
1282
1282
1283 [phabsend]
1283 [phabsend]
1284 confirm = true
1284 confirm = true
1285
1285
1286 By default, a separate review will be created for each commit that is
1286 By default, a separate review will be created for each commit that is
1287 selected, and will have the same parent/child relationship in Phabricator.
1287 selected, and will have the same parent/child relationship in Phabricator.
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1289 as if diffed from the parent of the first revision to the last. The commit
1289 as if diffed from the parent of the first revision to the last. The commit
1290 messages are concatenated in the summary field on Phabricator.
1290 messages are concatenated in the summary field on Phabricator.
1291
1291
1292 phabsend will check obsstore and the above association to decide whether to
1292 phabsend will check obsstore and the above association to decide whether to
1293 update an existing Differential Revision, or create a new one.
1293 update an existing Differential Revision, or create a new one.
1294 """
1294 """
1295 opts = pycompat.byteskwargs(opts)
1295 opts = pycompat.byteskwargs(opts)
1296 revs = list(revs) + opts.get(b'rev', [])
1296 revs = list(revs) + opts.get(b'rev', [])
1297 revs = scmutil.revrange(repo, revs)
1297 revs = scmutil.revrange(repo, revs)
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1299
1299
1300 if not revs:
1300 if not revs:
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1302 if opts.get(b'amend'):
1302 if opts.get(b'amend'):
1303 cmdutil.checkunfinished(repo)
1303 cmdutil.checkunfinished(repo)
1304
1304
1305 ctxs = [repo[rev] for rev in revs]
1305 ctxs = [repo[rev] for rev in revs]
1306
1306
1307 if any(c for c in ctxs if c.obsolete()):
1307 if any(c for c in ctxs if c.obsolete()):
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1309
1309
1310 # Ensure the local commits are an unbroken range. The semantics of the
1310 # Ensure the local commits are an unbroken range. The semantics of the
1311 # --fold option implies this, and the auto restacking of orphans requires
1311 # --fold option implies this, and the auto restacking of orphans requires
1312 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1312 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1313 # get A' as a parent.
1313 # get A' as a parent.
1314 def _fail_nonlinear_revs(revs, skiprev, revtype):
1314 def _fail_nonlinear_revs(revs, skiprev, revtype):
1315 badnodes = [repo[r].node() for r in revs if r != skiprev]
1315 badnodes = [repo[r].node() for r in revs if r != skiprev]
1316 raise error.Abort(
1316 raise error.Abort(
1317 _(b"cannot phabsend multiple %s revisions: %s")
1317 _(b"cannot phabsend multiple %s revisions: %s")
1318 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1318 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1319 hint=_(b"the revisions must form a linear chain"),
1319 hint=_(b"the revisions must form a linear chain"),
1320 )
1320 )
1321
1321
1322 heads = repo.revs(b'heads(%ld)', revs)
1322 heads = repo.revs(b'heads(%ld)', revs)
1323 if len(heads) > 1:
1323 if len(heads) > 1:
1324 _fail_nonlinear_revs(heads, heads.max(), b"head")
1324 _fail_nonlinear_revs(heads, heads.max(), b"head")
1325
1325
1326 roots = repo.revs(b'roots(%ld)', revs)
1326 roots = repo.revs(b'roots(%ld)', revs)
1327 if len(roots) > 1:
1327 if len(roots) > 1:
1328 _fail_nonlinear_revs(roots, roots.min(), b"root")
1328 _fail_nonlinear_revs(roots, roots.min(), b"root")
1329
1329
1330 fold = opts.get(b'fold')
1330 fold = opts.get(b'fold')
1331 if fold:
1331 if fold:
1332 if len(revs) == 1:
1332 if len(revs) == 1:
1333 # TODO: just switch to --no-fold instead?
1333 # TODO: just switch to --no-fold instead?
1334 raise error.Abort(_(b"cannot fold a single revision"))
1334 raise error.Abort(_(b"cannot fold a single revision"))
1335
1335
1336 # There's no clear way to manage multiple commits with a Dxxx tag, so
1336 # There's no clear way to manage multiple commits with a Dxxx tag, so
1337 # require the amend option. (We could append "_nnn", but then it
1337 # require the amend option. (We could append "_nnn", but then it
1338 # becomes jumbled if earlier commits are added to an update.) It should
1338 # becomes jumbled if earlier commits are added to an update.) It should
1339 # lock the repo and ensure that the range is editable, but that would
1339 # lock the repo and ensure that the range is editable, but that would
1340 # make the code pretty convoluted. The default behavior of `arc` is to
1340 # make the code pretty convoluted. The default behavior of `arc` is to
1341 # create a new review anyway.
1341 # create a new review anyway.
1342 if not opts.get(b"amend"):
1342 if not opts.get(b"amend"):
1343 raise error.Abort(_(b"cannot fold with --no-amend"))
1343 raise error.Abort(_(b"cannot fold with --no-amend"))
1344
1344
1345 # It might be possible to bucketize the revisions by the DREV value, and
1345 # It might be possible to bucketize the revisions by the DREV value, and
1346 # iterate over those groups when posting, and then again when amending.
1346 # iterate over those groups when posting, and then again when amending.
1347 # But for simplicity, require all selected revisions to be for the same
1347 # But for simplicity, require all selected revisions to be for the same
1348 # DREV (if present). Adding local revisions to an existing DREV is
1348 # DREV (if present). Adding local revisions to an existing DREV is
1349 # acceptable.
1349 # acceptable.
1350 drevmatchers = [
1350 drevmatchers = [
1351 _differentialrevisiondescre.search(ctx.description())
1351 _differentialrevisiondescre.search(ctx.description())
1352 for ctx in ctxs
1352 for ctx in ctxs
1353 ]
1353 ]
1354 if len({m.group('url') for m in drevmatchers if m}) > 1:
1354 if len({m.group('url') for m in drevmatchers if m}) > 1:
1355 raise error.Abort(
1355 raise error.Abort(
1356 _(b"cannot fold revisions with different DREV values")
1356 _(b"cannot fold revisions with different DREV values")
1357 )
1357 )
1358
1358
1359 # {newnode: (oldnode, olddiff, olddrev}
1359 # {newnode: (oldnode, olddiff, olddrev}
1360 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1360 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1361
1361
1362 confirm = ui.configbool(b'phabsend', b'confirm')
1362 confirm = ui.configbool(b'phabsend', b'confirm')
1363 confirm |= bool(opts.get(b'confirm'))
1363 confirm |= bool(opts.get(b'confirm'))
1364 if confirm:
1364 if confirm:
1365 confirmed = _confirmbeforesend(repo, revs, oldmap)
1365 confirmed = _confirmbeforesend(repo, revs, oldmap)
1366 if not confirmed:
1366 if not confirmed:
1367 raise error.Abort(_(b'phabsend cancelled'))
1367 raise error.Abort(_(b'phabsend cancelled'))
1368
1368
1369 actions = []
1369 actions = []
1370 reviewers = opts.get(b'reviewer', [])
1370 reviewers = opts.get(b'reviewer', [])
1371 blockers = opts.get(b'blocker', [])
1371 blockers = opts.get(b'blocker', [])
1372 phids = []
1372 phids = []
1373 if reviewers:
1373 if reviewers:
1374 phids.extend(userphids(repo.ui, reviewers))
1374 phids.extend(userphids(repo.ui, reviewers))
1375 if blockers:
1375 if blockers:
1376 phids.extend(
1376 phids.extend(
1377 map(
1377 map(
1378 lambda phid: b'blocking(%s)' % phid,
1378 lambda phid: b'blocking(%s)' % phid,
1379 userphids(repo.ui, blockers),
1379 userphids(repo.ui, blockers),
1380 )
1380 )
1381 )
1381 )
1382 if phids:
1382 if phids:
1383 actions.append({b'type': b'reviewers.add', b'value': phids})
1383 actions.append({b'type': b'reviewers.add', b'value': phids})
1384
1384
1385 drevids = [] # [int]
1385 drevids = [] # [int]
1386 diffmap = {} # {newnode: diff}
1386 diffmap = {} # {newnode: diff}
1387
1387
1388 # Send patches one by one so we know their Differential Revision PHIDs and
1388 # Send patches one by one so we know their Differential Revision PHIDs and
1389 # can provide dependency relationship
1389 # can provide dependency relationship
1390 lastrevphid = None
1390 lastrevphid = None
1391 for ctx in ctxs:
1391 for ctx in ctxs:
1392 if fold:
1392 if fold:
1393 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1393 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1394 else:
1394 else:
1395 ui.debug(b'sending rev %d\n' % ctx.rev())
1395 ui.debug(b'sending rev %d\n' % ctx.rev())
1396
1396
1397 # Get Differential Revision ID
1397 # Get Differential Revision ID
1398 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1398 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1399 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1399 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1400
1400
1401 if fold:
1401 if fold:
1402 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1402 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1403 ctxs[-1].node(), (None, None, None)
1403 ctxs[-1].node(), (None, None, None)
1404 )
1404 )
1405
1405
1406 if oldnode != ctx.node() or opts.get(b'amend'):
1406 if oldnode != ctx.node() or opts.get(b'amend'):
1407 # Create or update Differential Revision
1407 # Create or update Differential Revision
1408 revision, diff = createdifferentialrevision(
1408 revision, diff = createdifferentialrevision(
1409 ctxs if fold else [ctx],
1409 ctxs if fold else [ctx],
1410 revid,
1410 revid,
1411 lastrevphid,
1411 lastrevphid,
1412 oldbasenode,
1412 oldbasenode,
1413 oldnode,
1413 oldnode,
1414 olddiff,
1414 olddiff,
1415 actions,
1415 actions,
1416 opts.get(b'comment'),
1416 opts.get(b'comment'),
1417 )
1417 )
1418
1418
1419 if fold:
1419 if fold:
1420 for ctx in ctxs:
1420 for ctx in ctxs:
1421 diffmap[ctx.node()] = diff
1421 diffmap[ctx.node()] = diff
1422 else:
1422 else:
1423 diffmap[ctx.node()] = diff
1423 diffmap[ctx.node()] = diff
1424
1424
1425 newrevid = int(revision[b'object'][b'id'])
1425 newrevid = int(revision[b'object'][b'id'])
1426 newrevphid = revision[b'object'][b'phid']
1426 newrevphid = revision[b'object'][b'phid']
1427 if revid:
1427 if revid:
1428 action = b'updated'
1428 action = b'updated'
1429 else:
1429 else:
1430 action = b'created'
1430 action = b'created'
1431
1431
1432 # Create a local tag to note the association, if commit message
1432 # Create a local tag to note the association, if commit message
1433 # does not have it already
1433 # does not have it already
1434 if not fold:
1434 if not fold:
1435 m = _differentialrevisiondescre.search(ctx.description())
1435 m = _differentialrevisiondescre.search(ctx.description())
1436 if not m or int(m.group('id')) != newrevid:
1436 if not m or int(m.group('id')) != newrevid:
1437 tagname = b'D%d' % newrevid
1437 tagname = b'D%d' % newrevid
1438 tags.tag(
1438 tags.tag(
1439 repo,
1439 repo,
1440 tagname,
1440 tagname,
1441 ctx.node(),
1441 ctx.node(),
1442 message=None,
1442 message=None,
1443 user=None,
1443 user=None,
1444 date=None,
1444 date=None,
1445 local=True,
1445 local=True,
1446 )
1446 )
1447 else:
1447 else:
1448 # Nothing changed. But still set "newrevphid" so the next revision
1448 # Nothing changed. But still set "newrevphid" so the next revision
1449 # could depend on this one and "newrevid" for the summary line.
1449 # could depend on this one and "newrevid" for the summary line.
1450 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1450 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1451 newrevid = revid
1451 newrevid = revid
1452 action = b'skipped'
1452 action = b'skipped'
1453
1453
1454 drevids.append(newrevid)
1454 drevids.append(newrevid)
1455 lastrevphid = newrevphid
1455 lastrevphid = newrevphid
1456
1456
1457 if fold:
1457 if fold:
1458 for c in ctxs:
1458 for c in ctxs:
1459 if oldmap.get(c.node(), (None, None, None))[2]:
1459 if oldmap.get(c.node(), (None, None, None))[2]:
1460 action = b'updated'
1460 action = b'updated'
1461 else:
1461 else:
1462 action = b'created'
1462 action = b'created'
1463 _print_phabsend_action(ui, c, newrevid, action)
1463 _print_phabsend_action(ui, c, newrevid, action)
1464 break
1464 break
1465
1465
1466 _print_phabsend_action(ui, ctx, newrevid, action)
1466 _print_phabsend_action(ui, ctx, newrevid, action)
1467
1467
1468 # Update commit messages and remove tags
1468 # Update commit messages and remove tags
1469 if opts.get(b'amend'):
1469 if opts.get(b'amend'):
1470 unfi = repo.unfiltered()
1470 unfi = repo.unfiltered()
1471 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1471 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1472 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1472 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1473 # Eagerly evaluate commits to restabilize before creating new
1473 # Eagerly evaluate commits to restabilize before creating new
1474 # commits. The selected revisions are excluded because they are
1474 # commits. The selected revisions are excluded because they are
1475 # automatically restacked as part of the submission process.
1475 # automatically restacked as part of the submission process.
1476 restack = [
1476 restack = [
1477 c
1477 c
1478 for c in repo.set(
1478 for c in repo.set(
1479 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1479 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1480 revs,
1480 revs,
1481 revs,
1481 revs,
1482 )
1482 )
1483 ]
1483 ]
1484 wnode = unfi[b'.'].node()
1484 wnode = unfi[b'.'].node()
1485 mapping = {} # {oldnode: [newnode]}
1485 mapping = {} # {oldnode: [newnode]}
1486 newnodes = []
1486 newnodes = []
1487
1487
1488 drevid = drevids[0]
1488 drevid = drevids[0]
1489
1489
1490 for i, rev in enumerate(revs):
1490 for i, rev in enumerate(revs):
1491 old = unfi[rev]
1491 old = unfi[rev]
1492 if not fold:
1492 if not fold:
1493 drevid = drevids[i]
1493 drevid = drevids[i]
1494 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1494 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1495
1495
1496 newdesc = get_amended_desc(drev, old, fold)
1496 newdesc = get_amended_desc(drev, old, fold)
1497 # Make sure commit message contain "Differential Revision"
1497 # Make sure commit message contain "Differential Revision"
1498 if (
1498 if (
1499 old.description() != newdesc
1499 old.description() != newdesc
1500 or old.p1().node() in mapping
1500 or old.p1().node() in mapping
1501 or old.p2().node() in mapping
1501 or old.p2().node() in mapping
1502 ):
1502 ):
1503 if old.phase() == phases.public:
1503 if old.phase() == phases.public:
1504 ui.warn(
1504 ui.warn(
1505 _(b"warning: not updating public commit %s\n")
1505 _(b"warning: not updating public commit %s\n")
1506 % scmutil.formatchangeid(old)
1506 % scmutil.formatchangeid(old)
1507 )
1507 )
1508 continue
1508 continue
1509 parents = [
1509 parents = [
1510 mapping.get(old.p1().node(), (old.p1(),))[0],
1510 mapping.get(old.p1().node(), (old.p1(),))[0],
1511 mapping.get(old.p2().node(), (old.p2(),))[0],
1511 mapping.get(old.p2().node(), (old.p2(),))[0],
1512 ]
1512 ]
1513 new = context.metadataonlyctx(
1513 new = context.metadataonlyctx(
1514 repo,
1514 repo,
1515 old,
1515 old,
1516 parents=parents,
1516 parents=parents,
1517 text=newdesc,
1517 text=newdesc,
1518 user=old.user(),
1518 user=old.user(),
1519 date=old.date(),
1519 date=old.date(),
1520 extra=old.extra(),
1520 extra=old.extra(),
1521 )
1521 )
1522
1522
1523 newnode = new.commit()
1523 newnode = new.commit()
1524
1524
1525 mapping[old.node()] = [newnode]
1525 mapping[old.node()] = [newnode]
1526
1526
1527 if fold:
1527 if fold:
1528 # Defer updating the (single) Diff until all nodes are
1528 # Defer updating the (single) Diff until all nodes are
1529 # collected. No tags were created, so none need to be
1529 # collected. No tags were created, so none need to be
1530 # removed.
1530 # removed.
1531 newnodes.append(newnode)
1531 newnodes.append(newnode)
1532 continue
1532 continue
1533
1533
1534 _amend_diff_properties(
1534 _amend_diff_properties(
1535 unfi, drevid, [newnode], diffmap[old.node()]
1535 unfi, drevid, [newnode], diffmap[old.node()]
1536 )
1536 )
1537
1537
1538 # Remove local tags since it's no longer necessary
1538 # Remove local tags since it's no longer necessary
1539 tagname = b'D%d' % drevid
1539 tagname = b'D%d' % drevid
1540 if tagname in repo.tags():
1540 if tagname in repo.tags():
1541 tags.tag(
1541 tags.tag(
1542 repo,
1542 repo,
1543 tagname,
1543 tagname,
1544 nullid,
1544 nullid,
1545 message=None,
1545 message=None,
1546 user=None,
1546 user=None,
1547 date=None,
1547 date=None,
1548 local=True,
1548 local=True,
1549 )
1549 )
1550 elif fold:
1550 elif fold:
1551 # When folding multiple commits into one review with
1551 # When folding multiple commits into one review with
1552 # --fold, track even the commits that weren't amended, so
1552 # --fold, track even the commits that weren't amended, so
1553 # that their association isn't lost if the properties are
1553 # that their association isn't lost if the properties are
1554 # rewritten below.
1554 # rewritten below.
1555 newnodes.append(old.node())
1555 newnodes.append(old.node())
1556
1556
1557 # If the submitted commits are public, no amend takes place so
1557 # If the submitted commits are public, no amend takes place so
1558 # there are no newnodes and therefore no diff update to do.
1558 # there are no newnodes and therefore no diff update to do.
1559 if fold and newnodes:
1559 if fold and newnodes:
1560 diff = diffmap[old.node()]
1560 diff = diffmap[old.node()]
1561
1561
1562 # The diff object in diffmap doesn't have the local commits
1562 # The diff object in diffmap doesn't have the local commits
1563 # because that could be returned from differential.creatediff,
1563 # because that could be returned from differential.creatediff,
1564 # not differential.querydiffs. So use the queried diff (if
1564 # not differential.querydiffs. So use the queried diff (if
1565 # present), or force the amend (a new revision is being posted.)
1565 # present), or force the amend (a new revision is being posted.)
1566 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1566 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1567 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1567 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1568 _amend_diff_properties(unfi, drevid, newnodes, diff)
1568 _amend_diff_properties(unfi, drevid, newnodes, diff)
1569 else:
1569 else:
1570 _debug(
1570 _debug(
1571 ui,
1571 ui,
1572 b"local commit list for D%d is already up-to-date\n"
1572 b"local commit list for D%d is already up-to-date\n"
1573 % drevid,
1573 % drevid,
1574 )
1574 )
1575 elif fold:
1575 elif fold:
1576 _debug(ui, b"no newnodes to update\n")
1576 _debug(ui, b"no newnodes to update\n")
1577
1577
1578 # Restack any children of first-time submissions that were orphaned
1578 # Restack any children of first-time submissions that were orphaned
1579 # in the process. The ctx won't report that it is an orphan until
1579 # in the process. The ctx won't report that it is an orphan until
1580 # the cleanup takes place below.
1580 # the cleanup takes place below.
1581 for old in restack:
1581 for old in restack:
1582 parents = [
1582 parents = [
1583 mapping.get(old.p1().node(), (old.p1(),))[0],
1583 mapping.get(old.p1().node(), (old.p1(),))[0],
1584 mapping.get(old.p2().node(), (old.p2(),))[0],
1584 mapping.get(old.p2().node(), (old.p2(),))[0],
1585 ]
1585 ]
1586 new = context.metadataonlyctx(
1586 new = context.metadataonlyctx(
1587 repo,
1587 repo,
1588 old,
1588 old,
1589 parents=parents,
1589 parents=parents,
1590 text=old.description(),
1590 text=old.description(),
1591 user=old.user(),
1591 user=old.user(),
1592 date=old.date(),
1592 date=old.date(),
1593 extra=old.extra(),
1593 extra=old.extra(),
1594 )
1594 )
1595
1595
1596 newnode = new.commit()
1596 newnode = new.commit()
1597
1597
1598 # Don't obsolete unselected descendants of nodes that have not
1598 # Don't obsolete unselected descendants of nodes that have not
1599 # been changed in this transaction- that results in an error.
1599 # been changed in this transaction- that results in an error.
1600 if newnode != old.node():
1600 if newnode != old.node():
1601 mapping[old.node()] = [newnode]
1601 mapping[old.node()] = [newnode]
1602 _debug(
1602 _debug(
1603 ui,
1603 ui,
1604 b"restabilizing %s as %s\n"
1604 b"restabilizing %s as %s\n"
1605 % (short(old.node()), short(newnode)),
1605 % (short(old.node()), short(newnode)),
1606 )
1606 )
1607 else:
1607 else:
1608 _debug(
1608 _debug(
1609 ui,
1609 ui,
1610 b"not restabilizing unchanged %s\n" % short(old.node()),
1610 b"not restabilizing unchanged %s\n" % short(old.node()),
1611 )
1611 )
1612
1612
1613 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1613 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1614 if wnode in mapping:
1614 if wnode in mapping:
1615 unfi.setparents(mapping[wnode][0])
1615 unfi.setparents(mapping[wnode][0])
1616
1616
1617
1617
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: the trailing space in b'Parent ' appears intentional (presumably to
# match the "hg export" header spacing) — confirm before changing.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1629
1629
1630
1630
def _confirmbeforesend(repo, revs, oldmap):
    """Show what would be submitted and ask the user to confirm.

    Prints one line per selected revision: the Differential Revision that
    would be updated (or ``NEW`` for a first-time submission), the node,
    and the first line of the commit message.  Returns True when the user
    accepts the prompt, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)

    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))

        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodelabel = ui.label(bytes(ctx), b'phabricator.node')
        desclabel = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodelabel, desclabel))

    # promptchoice() returns 0 for the first (Yes) choice.
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1658
1658
1659
1659
# Status names, in the normalized form produced by ``_getstatusname()``,
# that are accepted as bare symbols in the DREVSPEC query language
# (see the ``walk()`` helper in ``querydrev()``).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1668
1668
1669
1669
1670 def _getstatusname(drev):
1670 def _getstatusname(drev):
1671 """get normalized status name from a Differential Revision"""
1671 """get normalized status name from a Differential Revision"""
1672 return drev[b'statusName'].replace(b' ', b'').lower()
1672 return drev[b'statusName'].replace(b' ', b'').lower()
1673
1673
1674
1674
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This grammar table is consumed by ``parser.parser()`` in ``_parse()``
# below; each entry follows the generic Mercurial parser element format.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1689
1689
1690
1690
def _tokenize(text):
    """Yield (token-type, value, position) triples for a DREVSPEC string.

    Symbols are maximal runs of characters outside the delimiter set;
    delimiters are yielded as their own token type, except spaces, which
    are skipped.  A final (b'end', None, pos) token is always emitted.
    """
    buf = memoryview(text)  # zero-copy slicing of the input
    delimiters = b'():+-& '
    offset = 0
    end = len(text)
    while offset < end:
        word = b''.join(
            itertools.takewhile(
                lambda ch: ch not in delimiters,
                pycompat.iterbytestr(buf[offset:]),
            )
        )
        if word:
            yield (b'symbol', word, offset)
            offset += len(word)
        else:
            # A delimiter character; spaces act only as separators.
            ch = text[offset : offset + 1]
            if ch != b' ':
                yield (ch, None, offset)
            offset += 1
    yield (b'end', None, offset)
1710
1710
1711
1711
def _parse(text):
    """Parse a DREVSPEC string into a tree; abort on trailing garbage."""
    p = parser.parser(_elements)
    tree, consumed = p.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1717
1717
1718
1718
1719 def _parsedrev(symbol):
1719 def _parsedrev(symbol):
1720 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1720 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1721 if symbol.startswith(b'D') and symbol[1:].isdigit():
1721 if symbol.startswith(b'D') and symbol[1:].isdigit():
1722 return int(symbol[1:])
1722 return int(symbol[1:])
1723 if symbol.isdigit():
1723 if symbol.isdigit():
1724 return int(symbol)
1724 return int(symbol)
1725
1725
1726
1726
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    plain = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            plain.add(drev)
    elif kind == b'ancestors':
        sub, subanc = _prefetchdrevs(tree[1])
        plain |= sub
        # every revision named under ``:`` also seeds an ancestor walk
        ancestors |= sub
        ancestors |= subanc
    else:
        for child in tree[1:]:
            sub, subanc = _prefetchdrevs(child)
            plain |= sub
            ancestors |= subanc
    return plain, ancestors
1747
1747
1748
1748
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # params carries either b'ids' or b'phids'; the first entry is the
        # lookup key into the shared ``prefetched`` cache.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, indexed both by PHID and by
        # integer revision id, so later lookups of either kind hit the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over the "phabricator:depends-on" edges; the
        # result is reversed so the bottom of the stack comes first.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor root,
    # speculatively fetch up to ``batchsize`` lower-numbered revisions in the
    # same conduit call, betting they belong to the same stack — this cuts
    # down the number of round trips getstack() has to make.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status symbol: select every prefetched revision whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set operators map directly onto the smartset operators of the
            # same name (via the operator module).
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1874
1874
1875
1875
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped rather than producing blank paragraphs.
    return b'\n\n'.join(p for p in parts if p)
1889
1889
1890
1890
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    description = ctx.description()

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions would end up with the same commit message after
    # the URI is amended in.  Instead, append the DREV line, or update it in
    # place if one already exists.  At worst, commit message or test plan
    # updates made on Phabricator aren't propagated back to the repository,
    # which seems reasonable when local commits are effectively combined in
    # Phabricator.
    if _differentialrevisiondescre.search(description):
        return _differentialrevisiondescre.sub(uri, description)

    return b'\n\n'.join([description, uri])
1916
1916
1917
1917
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    commits = (diff.get(b'properties') or {}).get(b'local:commits') or {}
    if len(commits) > 1:
        # ``local:commits`` is keyed by hex node.
        return {bin(hexnode) for hexnode in commits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1931
1931
1932
1932
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')

    if not meta:
        # No phabsend metadata; reconstruct what we can from arc's
        # "local:commits" property.
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # pick one entry deterministically (the common case is a single
            # commit keyed by its hex node)
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc stores a bare epoch; time zone info is lost here
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]

    # Fill anything still missing from fields of the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2000
2000
2001
2001
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if not specs:
        raise error.Abort(_(b"empty DREVSPEC set"))

    def _wrap(s):
        # With --stack, pull in the whole stack below each revision.
        inner = b':(%s)' % s if stack else s
        return b'(%s)' % inner

    spec = b'+'.join(pycompat.maplist(_wrap, specs))
    drevs = querydrev(ui, spec)
    if not drevs:
        raise error.Abort(_(b"empty DREVSPEC set"))
    return drevs
2021
2021
2022
2022
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch the hg:meta property for every needed diff in one conduit call.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        rawdiff = callconduit(
            ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Reconstruct "# HG changeset patch" headers from hg:meta so that
        # the "import" command can restore the recorded metadata.  See
        # patchheadermap and extract in mercurial/patch.py for the headers
        # import understands.
        headerlines = [b'# HG changeset patch']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))

        header = b''.join(line + b'\n' for line in headerlines)
        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, rawdiff)))

    # Hand all generated patches to the supplied callback in one batch.
    write(patches)
2059
2059
2060
2060
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Stream each generated patch body straight to the ui.
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
2097
2097
2098
2098
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Start stacking patches on the working directory's parents; each
        # successfully imported node becomes the parent of the next patch.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, content in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(content)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2158
2158
2159
2159
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Transaction types understood by differential.revision.edit; at most one
    # of the corresponding flags may be set on a single invocation.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # The comment, if any, is only attached to the final revision.
        if i == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2211
2213
2212
2214
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    # No "Differential Revision" line in the description; fall back to the
    # first local tag matching the D<num> pattern and build the URL from the
    # configured Phabricator base URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(t):
            url = ctx.repo().ui.config(b'phabricator', b'url')
            if not url.endswith(b'/'):
                url += b'/'
            return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
2235
2237
2236
2238
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # Changeset has no associated Differential Revision.
        return None
    # Query conduit and pick the entry whose id matches our revision.
    result = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in result:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
2257
2259
2258
2260
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Split revisions into those with a known Differential Revision id and
    # those without; a single drev id may map to several local revisions.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One conduit query for all ids, then invert the mapping to rev -> drev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked by the displayer for each changeset: print the
        # differential URI and its colorized status name.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Drop revisions without differential info from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now