##// END OF EJS Templates
phabricator: teach `getoldnodedrevmap()` to handle folded reviews...
Matt Harbison -
r45136:5f9c917e default
parent child Browse files
Show More
@@ -1,2022 +1,2055 b''
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

A "phabstatus" view for :hg:`show` is also provided; it displays status
information of Phabricator differentials associated with unfinished
changesets.

By default, Phabricator requires ``Test Plan`` which might prevent some
changeset from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that is not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import base64
import contextlib
import hashlib
import itertools
import json
import mimetypes
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.thirdparty import attr
from mercurial import (
    cmdutil,
    context,
    copies,
    encoding,
    error,
    exthelper,
    graphmod,
    httpconnection as httpconnectionmod,
    localrepo,
    logcmdutil,
    match,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)
from . import show


# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All commands, config items and template keywords are registered through a
# single exthelper instance.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels used by the phabricator commands' output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Hidden flag appended to every command registered via vcrcommand(); lets the
# test suite record/replay Conduit HTTP traffic with the ``vcr`` package.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162
162
163
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.
    """
    loaded = False
    arcparams = {}

    try:
        # json.loads only accepts bytes from 3.6+, and only returns unicode
        # strings; convert on the way in and back out
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        arcparams = pycompat.rapply(
            lambda v: encoding.unitolocal(v)
            if isinstance(v, pycompat.unicode)
            else v,
            pycompat.json_loads(rawparams),
        )
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in this repo; nothing to do
        pass

    overrides = util.sortdict()

    if b"repository.callsign" in arcparams:
        overrides[(b"phabricator", b"callsign")] = arcparams[
            b"repository.callsign"
        ]

    if b"phabricator.uri" in arcparams:
        overrides[(b"phabricator", b"url")] = arcparams[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Chain to the original implementation to load .hg/hgrc as well
    return orig(ui, wdirvfs, hgvfs, requirements) or loaded
200
200
201
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command with a hidden ``--test-vcr`` option.

    When ``--test-vcr PATH`` is given, HTTP traffic is recorded to (or, if
    PATH already exists, replayed from) the cassette file using the ``vcr``
    package; otherwise the wrapped function runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Consider two requests equal if URI, method, and form parameters
        # match; JSON-valued parameters are compared structurally so that
        # key ordering differences don't break replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the real Conduit API token before it is written to disk
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are secrets too; don't record them
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does not play well with demandimport; import it eagerly
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own connection classes so the
                        # urlmod opener goes through vcr's stubs
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # depth=2 so checksignature reports errors against fn, not inner
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280
280
281
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def walk(prefix, value):
        if isinstance(value, bool):
            # PHP form encoding spells booleans as strings
            value = b'true' if value else b'false'
        # NOTE: dispatch on the exact type (not isinstance) so that dict/list
        # subclasses are treated as leaves, matching http_build_query usage
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                walk(b'%s[%s]' % (prefix, key), item)
            else:
                walk(key, item)

    walk(b'', params)
    return util.urlreq.urlencode(flat)
307
307
308
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    found = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if found:
        groupname, authcfg = found
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authcfg.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337
337
338
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``); the
    request is sent either through the configured ``phabricator.curlcmd``
    or Mercurial's builtin HTTP machinery. Raises ``error.Abort`` when the
    server reports a Conduit error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Don't mutate the caller's dict when injecting the auth token
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form data through the user-specified curl command
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Conduit replies in unicode JSON; convert all strings back to local bytes
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382
382
383
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+ and only returns unicode
    # strings, so convert at both boundaries
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def fromlocal(x):
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407
407
408
408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    res = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = res[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the lookup so later calls in this process skip the round trip
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
428
428
429
429
# Matches a local tag of the form "D123" (no leading zeros)
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url ending in D123>" commit-message
# line; exposes the full URL and the numeric revision id as named groups
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434
434
435
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag must be confirmed against
                        # Phabricator's own record below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: trust the explicit commit-message association
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
            if oldnode and not has_node(oldnode):
                oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529
547
530
548
def getdrevmap(repo, revs):
    """Map each revision in ``revs`` to its Differential Revision ID.

    Revisions with no discoverable Differential Revision map to None.
    The commit message trailer takes precedence; local ``D<num>`` tags
    are consulted as a fallback.
    """
    drevmap = {rev: None for rev in revs}
    for rev in revs:
        ctx = repo[rev]
        # Check the commit message first.
        found = _differentialrevisiondescre.search(ctx.description())
        if found:
            drevmap[rev] = int(found.group('id'))
            continue
        # Fall back to local tags.
        for tag in repo.nodetags(ctx.node()):
            found = _differentialrevisiontagre.match(tag)
            if found:
                drevmap[rev] = int(found.group(1))
                break

    return drevmap
552
570
553
571
def getdiff(basectx, ctx, diffopts):
    """Return the plain-text diff for ``basectx::ctx`` without any header
    (user, commit message, etc).
    """
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; labels are irrelevant here.
    return b''.join(chunk for chunk, _label in chunks)
562
580
563
581
class DiffChangeType(object):
    """Constants describing the kind of a Differential change (per-file).

    Values mirror Phabricator's DifferentialChangeType and must not be
    renumbered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573
591
574
592
class DiffFileType(object):
    """Constants describing how Phabricator should treat a file's content.

    Values mirror Phabricator's DifferentialChangeType file types and must
    not be renumbered.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579
597
580
598
@attr.s
class phabhunk(dict):
    """A single Differential hunk, owned by a Differential change."""

    # Attribute names are dictated by the conduit wire format, hence the
    # camelCase ("camelcase-required") exceptions to local naming style.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594
612
595
613
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Mirror every ``new:*`` metadata entry under an ``old:*`` key."""
        # Snapshot the keys: the loop inserts into self.metadata.
        for key in list(self.metadata.keys()):
            self.metadata[key.replace(b'new:', b'old:')] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line stats into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635
653
636
654
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the file path it affects."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663
681
664
682
def maketext(pchange, basectx, ctx, fname):
    """Populate ``pchange`` with text hunks for ``fname``.

    The diff is taken between ``basectx.p1()`` and ``ctx`` with a huge
    context so each file yields a single self-contained set of hunks.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for hunkinfo in fhunks:
        (oldoff, oldlen, newoff, newlen), lines = hunkinfo
        # lines[0] is the "@@ ..." marker; the corpus is everything after it.
        corpus = b''.join(lines[1:])
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset=oldoff,
                oldLength=oldlen,
                newOffset=newoff,
                newLength=newlen,
                corpus=corpus,
                addLines=added,
                delLines=deleted,
            )
        )
693
711
694
712
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by an earlier ``file.allocate``
    call for this content.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Read the file content once, instead of once per chunk as before.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this byte range.
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720
738
721
739
def uploadfile(fctx):
    """Upload a binary file to Phabricator and return its file PHID.

    Raises ``error.Abort`` if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    # Read the content once; it is needed for hashing and possibly upload.
    data = fctx.data()

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': fctx.size(),
            b'contentHash': pycompat.bytestr(
                hashlib.sha256(data).hexdigest()
            ),
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was assigned up front: the server wants chunked upload.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(data),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757
775
758
776
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical, so reuse the new side's metadata.  If the
        # type were left as IMAGE/BINARY the web UI might try to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ (or the file is being removed): record and upload the
    # old version.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
782
800
783
801
def makebinary(pchange, fctx):
    """Populate ``pchange`` for a binary file: upload it and set metadata."""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # Images can be rendered by the web UI, so mark them as such.
        pchange.fileType = DiffFileType.IMAGE
796
814
797
815
# Copied from mercurial/patch.py
# Maps a file's flags ('l' symlink, 'x' executable, '' regular) to the
# git-style mode string used in diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800
818
801
819
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn so the user knows why the file is being treated as binary.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815
833
816
834
def addremoved(pdiff, basectx, ctx, removed):
    """Add removed files to the phabdiff (must not include moves)."""
    parentctx = basectx.p1()
    for fname in removed:
        oldfctx = parentctx[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary and non-UTF-8 files get no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829
847
830
848
def addmodified(pdiff, basectx, ctx, modified):
    """Add modified files to the phabdiff."""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # notutf8() warns as a side effect, so preserve the short-circuit
        # evaluation order.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855
873
856
874
def addadded(pdiff, basectx, ctx, added, removed):
    """Add file adds to the phabdiff: brand-new files and copies/moves."""
    # Track files already recorded as moved/copied, so additional copies of
    # the same source can be marked MULTICOPY (moved sources also get
    # dropped from ``removed`` so addremoved() won't see them).
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source also disappeared: this is a move.
                movedchanges[originalfname] = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # An additional copy of a source already recorded as moved.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                origpchange = copiedchanges.get(originalfname)
                if origpchange is None:
                    origpchange = phabchange(
                        currentPath=originalfname,
                        type=DiffChangeType.COPY_AWAY,
                    )
                    copiedchanges[originalfname] = origpchange
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() warns as a side effect, so preserve the short-circuit
        # evaluation order.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Away-side records are emitted last, once their types/awayPaths are
    # final.
    for copiedchange in copiedchanges.values():
        pdiff.addchange(copiedchange)
    for movedchange in movedchanges.values():
        pdiff.addchange(movedchange)
938
956
939
957
def creatediff(basectx, ctx):
    """Create a "Differential Diff" covering ``basectx::ctx``.

    Returns the conduit response; aborts if the call yields nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff

    if basectx != ctx:
        msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
    else:
        msg = _(b'cannot create diff for %s') % ctx
    raise error.Abort(msg)
969
987
970
988
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the tip commit and the base parent of the range.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" records every commit that went into this diff.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1014
1032
1015
1033
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a tuple of the ``differential.revision.edit`` Conduit response and
    the diff dict that is now attached to the Revision (either the freshly
    created one, or ``olddiff`` when the patch content was unchanged).
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context patches so any content change (not just hunk
        # headers) forces a new diff upload.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1134
1152
1135
1153
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    response = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # An unknown username is not an error of the API, so detect names the
    # server did not resolve ourselves and abort on them.
    data = response[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1151
1169
1152
1170
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers are expressed with the "blocking(PHID)" wrapper
        # understood by differential.revision.edit.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode = oldnode
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Map parents through already-rewritten ancestors so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1368
1386
1369
1387
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space matches "hg export"'s "# Parent " header prefix.
        (b'parent', b'Parent '),
    ]
)
1381
1399
1382
1400
def _confirmbeforesend(repo, revs, oldmap):
    """print the to-be-sent changesets and prompt the user for confirmation

    Each rev is shown with its Differential Revision id from ``oldmap`` (or
    ``NEW`` when it has none). Returns True if the user accepts the prompt,
    False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice() returns the index of the chosen option; "Yes" is 0, so a
    # truthy value means the user declined.
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1410
1428
1411
1429
# Normalized status names (see _getstatusname()) that this extension knows
# how to interpret.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1420
1438
1421
1439
1422 def _getstatusname(drev):
1440 def _getstatusname(drev):
1423 """get normalized status name from a Differential Revision"""
1441 """get normalized status name from a Differential Revision"""
1424 return drev[b'statusName'].replace(b' ', b'').lower()
1442 return drev[b'statusName'].replace(b' ', b'').lower()
1425
1443
1426
1444
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1441
1459
1442
1460
def _tokenize(text):
    """tokenize the drev query language, yielding (type, value, pos) tuples

    Symbols are maximal runs of non-special bytes and are yielded as
    (b'symbol', value, pos); each special character except space is yielded
    as its own token with a None value. A final (b'end', None, pos) token
    terminates the stream.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Collect the longest run of non-special bytes starting at pos.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1462
1480
1463
1481
def _parse(text):
    """parse a drev query string into a parse tree

    Raises ParseError if the parser stops before consuming all of ``text``.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1469
1487
1470
1488
1471 def _parsedrev(symbol):
1489 def _parsedrev(symbol):
1472 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1490 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1473 if symbol.startswith(b'D') and symbol[1:].isdigit():
1491 if symbol.startswith(b'D') and symbol[1:].isdigit():
1474 return int(symbol[1:])
1492 return int(symbol[1:])
1475 if symbol.isdigit():
1493 if symbol.isdigit():
1476 return int(symbol)
1494 return int(symbol)
1477
1495
1478
1496
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        # Leaf node: record its numeric drev id, if it has one.
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        # The operand's ids are needed both directly and as ancestor roots.
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles |= sub_singles
        ancestors |= sub_singles
        ancestors |= sub_ancestors
    else:
        # Any other operator: union the results of all operands.
        for subtree in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(subtree)
            singles |= sub_singles
            ancestors |= sub_ancestors
    return singles, ancestors
1499
1517
1500
1518
1501 def querydrev(ui, spec):
1519 def querydrev(ui, spec):
1502 """return a list of "Differential Revision" dicts
1520 """return a list of "Differential Revision" dicts
1503
1521
1504 spec is a string using a simple query language, see docstring in phabread
1522 spec is a string using a simple query language, see docstring in phabread
1505 for details.
1523 for details.
1506
1524
1507 A "Differential Revision dict" looks like:
1525 A "Differential Revision dict" looks like:
1508
1526
1509 {
1527 {
1510 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1528 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1511 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1529 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1512 "auxiliary": {
1530 "auxiliary": {
1513 "phabricator:depends-on": [
1531 "phabricator:depends-on": [
1514 "PHID-DREV-gbapp366kutjebt7agcd"
1532 "PHID-DREV-gbapp366kutjebt7agcd"
1515 ]
1533 ]
1516 "phabricator:projects": [],
1534 "phabricator:projects": [],
1517 },
1535 },
1518 "branch": "default",
1536 "branch": "default",
1519 "ccs": [],
1537 "ccs": [],
1520 "commits": [],
1538 "commits": [],
1521 "dateCreated": "1499181406",
1539 "dateCreated": "1499181406",
1522 "dateModified": "1499182103",
1540 "dateModified": "1499182103",
1523 "diffs": [
1541 "diffs": [
1524 "3",
1542 "3",
1525 "4",
1543 "4",
1526 ],
1544 ],
1527 "hashes": [],
1545 "hashes": [],
1528 "id": "2",
1546 "id": "2",
1529 "lineCount": "2",
1547 "lineCount": "2",
1530 "phid": "PHID-DREV-672qvysjcczopag46qty",
1548 "phid": "PHID-DREV-672qvysjcczopag46qty",
1531 "properties": {},
1549 "properties": {},
1532 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1550 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1533 "reviewers": [],
1551 "reviewers": [],
1534 "sourcePath": null
1552 "sourcePath": null
1535 "status": "0",
1553 "status": "0",
1536 "statusName": "Needs Review",
1554 "statusName": "Needs Review",
1537 "summary": "",
1555 "summary": "",
1538 "testPlan": "",
1556 "testPlan": "",
1539 "title": "example",
1557 "title": "example",
1540 "uri": "https://phab.example.com/D2",
1558 "uri": "https://phab.example.com/D2",
1541 }
1559 }
1542 """
1560 """
1543 # TODO: replace differential.query and differential.querydiffs with
1561 # TODO: replace differential.query and differential.querydiffs with
1544 # differential.diff.search because the former (and their output) are
1562 # differential.diff.search because the former (and their output) are
1545 # frozen, and planned to be deprecated and removed.
1563 # frozen, and planned to be deprecated and removed.
1546
1564
1547 def fetch(params):
1565 def fetch(params):
1548 """params -> single drev or None"""
1566 """params -> single drev or None"""
1549 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1567 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1550 if key in prefetched:
1568 if key in prefetched:
1551 return prefetched[key]
1569 return prefetched[key]
1552 drevs = callconduit(ui, b'differential.query', params)
1570 drevs = callconduit(ui, b'differential.query', params)
1553 # Fill prefetched with the result
1571 # Fill prefetched with the result
1554 for drev in drevs:
1572 for drev in drevs:
1555 prefetched[drev[b'phid']] = drev
1573 prefetched[drev[b'phid']] = drev
1556 prefetched[int(drev[b'id'])] = drev
1574 prefetched[int(drev[b'id'])] = drev
1557 if key not in prefetched:
1575 if key not in prefetched:
1558 raise error.Abort(
1576 raise error.Abort(
1559 _(b'cannot get Differential Revision %r') % params
1577 _(b'cannot get Differential Revision %r') % params
1560 )
1578 )
1561 return prefetched[key]
1579 return prefetched[key]
1562
1580
1563 def getstack(topdrevids):
1581 def getstack(topdrevids):
1564 """given a top, get a stack from the bottom, [id] -> [id]"""
1582 """given a top, get a stack from the bottom, [id] -> [id]"""
1565 visited = set()
1583 visited = set()
1566 result = []
1584 result = []
1567 queue = [{b'ids': [i]} for i in topdrevids]
1585 queue = [{b'ids': [i]} for i in topdrevids]
1568 while queue:
1586 while queue:
1569 params = queue.pop()
1587 params = queue.pop()
1570 drev = fetch(params)
1588 drev = fetch(params)
1571 if drev[b'id'] in visited:
1589 if drev[b'id'] in visited:
1572 continue
1590 continue
1573 visited.add(drev[b'id'])
1591 visited.add(drev[b'id'])
1574 result.append(int(drev[b'id']))
1592 result.append(int(drev[b'id']))
1575 auxiliary = drev.get(b'auxiliary', {})
1593 auxiliary = drev.get(b'auxiliary', {})
1576 depends = auxiliary.get(b'phabricator:depends-on', [])
1594 depends = auxiliary.get(b'phabricator:depends-on', [])
1577 for phid in depends:
1595 for phid in depends:
1578 queue.append({b'phids': [phid]})
1596 queue.append({b'phids': [phid]})
1579 result.reverse()
1597 result.reverse()
1580 return smartset.baseset(result)
1598 return smartset.baseset(result)
1581
1599
1582 # Initialize prefetch cache
1600 # Initialize prefetch cache
1583 prefetched = {} # {id or phid: drev}
1601 prefetched = {} # {id or phid: drev}
1584
1602
1585 tree = _parse(spec)
1603 tree = _parse(spec)
1586 drevs, ancestordrevs = _prefetchdrevs(tree)
1604 drevs, ancestordrevs = _prefetchdrevs(tree)
1587
1605
1588 # developer config: phabricator.batchsize
1606 # developer config: phabricator.batchsize
1589 batchsize = ui.configint(b'phabricator', b'batchsize')
1607 batchsize = ui.configint(b'phabricator', b'batchsize')
1590
1608
1591 # Prefetch Differential Revisions in batch
1609 # Prefetch Differential Revisions in batch
1592 tofetch = set(drevs)
1610 tofetch = set(drevs)
1593 for r in ancestordrevs:
1611 for r in ancestordrevs:
1594 tofetch.update(range(max(1, r - batchsize), r + 1))
1612 tofetch.update(range(max(1, r - batchsize), r + 1))
1595 if drevs:
1613 if drevs:
1596 fetch({b'ids': list(tofetch)})
1614 fetch({b'ids': list(tofetch)})
1597 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1615 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1598
1616
1599 # Walk through the tree, return smartsets
1617 # Walk through the tree, return smartsets
1600 def walk(tree):
1618 def walk(tree):
1601 op = tree[0]
1619 op = tree[0]
1602 if op == b'symbol':
1620 if op == b'symbol':
1603 drev = _parsedrev(tree[1])
1621 drev = _parsedrev(tree[1])
1604 if drev:
1622 if drev:
1605 return smartset.baseset([drev])
1623 return smartset.baseset([drev])
1606 elif tree[1] in _knownstatusnames:
1624 elif tree[1] in _knownstatusnames:
1607 drevs = [
1625 drevs = [
1608 r
1626 r
1609 for r in validids
1627 for r in validids
1610 if _getstatusname(prefetched[r]) == tree[1]
1628 if _getstatusname(prefetched[r]) == tree[1]
1611 ]
1629 ]
1612 return smartset.baseset(drevs)
1630 return smartset.baseset(drevs)
1613 else:
1631 else:
1614 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1632 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1615 elif op in {b'and_', b'add', b'sub'}:
1633 elif op in {b'and_', b'add', b'sub'}:
1616 assert len(tree) == 3
1634 assert len(tree) == 3
1617 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1635 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1618 elif op == b'group':
1636 elif op == b'group':
1619 return walk(tree[1])
1637 return walk(tree[1])
1620 elif op == b'ancestors':
1638 elif op == b'ancestors':
1621 return getstack(walk(tree[1]))
1639 return getstack(walk(tree[1]))
1622 else:
1640 else:
1623 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1641 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1624
1642
1625 return [prefetched[r] for r in walk(tree)]
1643 return [prefetched[r] for r in walk(tree)]
1626
1644
1627
1645
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    # A non-empty test plan gets its own labelled section.
    plan = drev[b'testPlan'].rstrip()
    if plan:
        plan = b'Test Plan:\n%s' % plan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        plan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Empty sections are dropped so the message has no blank runs.
    return b'\n\n'.join(part for part in sections if part)
1641
1659
1642
1660
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made
    for each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    desc = ctx.description()
    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)

    return b'\n\n'.join([desc, uri])
1668
1686
1669
1687
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    commits = (diff.get(b'properties') or {}).get(b'local:commits') or {}
    if len(commits) > 1:
        # Folded review: every key is a hex node of a local commit.
        return {bin(hexnode) for hexnode in commits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1701
1702
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting dict values only works here because a
            # single commit is expected in the --no-fold case; confirm that
            # multi-commit diffs never reach this branch on py3.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" carries epoch seconds; tz info is lost.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if commit.get(b'parents'):
                meta[b'parent'] = commit[b'parents'][0]
    # Backfill anything still missing from the diff-level fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1737
1770
1738
1771
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _wrap(s):
            # ``:`` expands a single spec to the whole stack below it.
            inner = b':(%s)' % s if stack else s
            return b'(%s)' % inner

        # Multiple specs form the union of their individual results.
        spec = b'+'.join(pycompat.maplist(_wrap, specs))

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1758
1791
1759
1792
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential
    number (as bytes, without the "D" prefix) and the bytes are the text of
    a patch to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs in a single conduit call
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # The latest diff attached to the review is the one to import.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(
                    b'# %s %s\n' % (_metanamemap[key], meta[key])
                )

        content = b'%s%s\n%s' % (
            b''.join(headerlines),
            getdescfromdrev(drev),
            body,
        )
        patches.append((drev[b'id'], content))

    # Hand the accumulated patches to the supplied callback
    write(patches)
1796
1829
1797
1830
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Dump every patch body to the ui; readpatch() drives the loop.
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
1834
1867
1835
1868
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of
    # leaving rejects. strip/prefix are mandatory default values, synced
    # with commands.import. Evolve 9.3.0 assumes the b'obsolete' key is
    # present in cmdutil.tryimportone().
    opts.update(
        {b'bypass': True, b'strip': 1, b'prefix': b'', b'obsolete': False,}
    )

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        # Handled by evolve wrapping tryimportone()
        opts[b'obsolete'] = True

    def _apply(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Stack the next patch on top of what was just committed
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
1895
1928
1896
1929
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # The comment applies only to the last revision in the set.
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1934
1967
1935
1968
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )

    # No DREV line in the description: fall back to local review tags.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})

    return None
1958
1991
1959
1992
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None

    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    # Pick the response entry matching the requested review id, if any.
    drev = next((d for d in drevs if int(d[b'id']) == drevid), None)
    if drev is None:
        return None
    return templateutil.hybriddict(
        {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
    )
1980
2013
1981
2014
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Split the unfinished changesets into those with a known review id and
    # those without one; the latter are dropped from the graph below.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit round trip for all reviews, then map each back to its revs.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Display hook: print the review URL and colored status per changeset.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now