##// END OF EJS Templates
phabricator: teach createdifferentialrevision() to allow a folded commit range...
Matt Harbison -
r45135:419fec82 default
parent child Browse files
Show More
@@ -1,2003 +1,2022
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Central registration helper; the aliases below expose the conventional
# extension entry points (cmdtable, uisetup, ...) that Mercurial looks up.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels used when rendering phabricator output (drev status,
# actions taken by phabsend, etc.).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered through vcrcommand(); it
# lets tests record or replay Conduit HTTP traffic via a vcr cassette file.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162
162
163
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` (a JSON file in the working directory) and maps the
    ``repository.callsign`` and ``phabricator.uri`` keys onto the equivalent
    ``[phabricator]`` config entries, then chains to the wrapped loadhgrc so
    ``.hg/hgrc`` values still apply on top. Returns True if any config was
    loaded by either source.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig present: nothing to load, not an error.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
200
200
201
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose Conduit HTTP traffic can be recorded/replayed.

    Behaves like ``command(name, flags, spec, ...)`` but adds the hidden
    ``--test-vcr PATH`` option. When given, HTTP requests are recorded to (or
    replayed from) the vcr cassette at PATH, so tests can run without a live
    Phabricator server.
    """
    # Every vcr-capable command also accepts --test-vcr.
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method and decoded body params agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub real conduit API tokens before they land in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop session cookies so they are never committed with a transcript.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's imports confuse Mercurial's demand importer;
                # temporarily disable it while loading the library.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own connection classes so its
                        # url opener goes through vcr's stubs.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette: run the command against the real network.
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280
280
281
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # Render booleans the way PHP's http_build_query would.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type checks on purpose: only plain lists/dicts recurse.
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = list(value.items())
        else:
            # Leaf value: record it under the accumulated key path.
            flatparams[prefix] = value
            return
        for key, item in pairs:
            subkey = b'%s[%s]' % (prefix, key) if prefix else key
            flatten(subkey, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
307
307
308
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up the [auth] group matching this URL for the conduit token.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337
337
338
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``). The
    API token from the [auth] config is injected into the request. Raises
    ``error.Abort`` when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before mutating: callers keep ownership of their params dict.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form body through the user-configured curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: Mercurial's own urllib-based opener.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert the decoded JSON (unicode strings) back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382
382
383
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Pretty-print with stable key order so output is test-friendly.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407
407
408
408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # An explicit (or previously cached) PHID short-circuits the lookup.
    # developer config: phabricator.repophid
    cached = repo.ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    response = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the answer so later calls skip the Conduit round trip.
    repo.ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
428
428
429
429
# Matches a whole local tag name like "D123" linking a commit to a
# Differential Revision; group 1 is the numeric revision ID.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>" line inside a commit message
# (multi-line mode); the revision number is captured as group "id".
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434
434
435
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # for/else: no predecessor carried a D-tag, so fall back to
            # the commit message of the node itself.
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the message explicitly names the drev, so skip
                # the precursor-overlap sanity check below.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the hg node recorded in a diff's metadata, if any.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                # The D-tag is stale/bogus: delete it locally (tagging the
                # nullid removes a local tag) and warn.
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529
529
530
530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevof(ctx):
        # Prefer an explicit "Differential Revision:" line in the message.
        msgmatch = _differentialrevisiondescre.search(ctx.description())
        if msgmatch:
            return int(msgmatch.group('id'))
        # Otherwise fall back to a local "D123" tag, if one exists.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                return int(tagmatch.group(1))
        return None

    return {rev: _drevof(repo[rev]) for rev in revs}
552
552
553
553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw chunks; the UI labels are irrelevant here.
    return b''.join(
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
        )
    )
562
562
563
563
class DiffChangeType(object):
    """Numeric per-file change types used in Differential diff payloads.

    NOTE(review): the values appear to mirror Phabricator's own change-type
    constants; confirm against the Conduit API before altering them.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573
573
574
574
class DiffFileType(object):
    """Numeric file-content types used in Differential diff payloads.

    NOTE(review): the values appear to mirror Phabricator's own file-type
    constants; confirm against the Conduit API before altering them.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579
579
580
580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body; maketext() fills it with the hunk's lines minus the
    # first (header) line.
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594
594
595
595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate every ``new:*`` metadata entry under the ``old:*`` prefix"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode (e.g. ``b'100644'``)"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode (e.g. ``b'100644'``)"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk and fold its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635
635
636
636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps a file's currentPath to its serialized phabchange (see addchange)
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange, keyed by the file's current path"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663
663
664
664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # Maximum context so each file is effectively sent in full.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), hunklines in fhunks:
        # The first line is the "@@ ..." range marker; the corpus is the rest.
        corpus = b''.join(hunklines[1:])

        # Diffstat over header + hunk lines yields the add/del counts that
        # feed the phabchange totals.
        statlines = list(header) + list(hunklines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693
693
694
694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fctx`` is the file context to upload; ``fphid`` is the file PHID
    returned by the earlier ``file.allocate`` call.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Materialize the (potentially large) file content at most once, and only
    # if at least one chunk actually needs uploading; the original re-read
    # fctx.data() on every loop iteration.
    data = None
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Chunks the server already holds are marked complete; skip them.
            if chunk[b'complete']:
                continue
            if data is None:
                data = fctx.data()
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720
720
721
721
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # The allocate call handed back a PHID: upload chunk by chunk.
            uploadchunks(fctx, fphid)
        else:
            # Small file: a single one-shot upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757
757
758
758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical: no need to re-upload the old version.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ (or the file is being removed), add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
782
782
783
783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # Images get a dedicated type so the web UI can render a preview.
        pchange.fileType = DiffFileType.IMAGE
796
796
797
797
# Copied from mercurial/patch.py
# Maps hg file flags (b'l' symlink, b'x' executable, b'' regular) to the
# octal mode strings used in git-style diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800
800
801
801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user the file will be treated as binary rather than text.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815
815
816
816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    parentctx = basectx.p1()
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = parentctx[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or non-UTF-8) deletions carry no text hunks.
        istextual = not (oldfctx.isbinary() or notutf8(oldfctx))
        if istextual:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829
829
830
830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    parentctx = basectx.p1()
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = parentctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # notutf8() prints a notice as a side effect, so keep the original
        # short-circuit evaluation order.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855
855
856
856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # Folded range: copy tracing must span from the base's parent.
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        # Set only when this add is really a rename/copy (see below).
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                # renamed() yields (source path, source node).
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source vanished in this change: record a move, and drop
                # the source from ``removed`` so addremoved() skips it.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Source already moved once; a second destination makes it a
                # MULTICOPY in Differential terms.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a notice as a side effect; evaluation order matters.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the synthetic source-side changes recorded above.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938
938
939
939
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff

    # Conduit returned nothing usable: report the failing range or commit.
    if basectx != ctx:
        raise error.Abort(_(b'cannot create diff for %s::%s') % (basectx, ctx))
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
969
969
970
970
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the diff as a whole (tip commit + overall parent).
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" lists each commit covered by the diff individually.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1014
1014
1015
1015
1016 def createdifferentialrevision(
1016 def createdifferentialrevision(
1017 ctx,
1017 ctxs,
1018 revid=None,
1018 revid=None,
1019 parentrevphid=None,
1019 parentrevphid=None,
1020 oldbasenode=None,
1020 oldnode=None,
1021 oldnode=None,
1021 olddiff=None,
1022 olddiff=None,
1022 actions=None,
1023 actions=None,
1023 comment=None,
1024 comment=None,
1024 ):
1025 ):
1025 """create or update a Differential Revision
1026 """create or update a Differential Revision
1026
1027
1027 If revid is None, create a new Differential Revision, otherwise update
1028 If revid is None, create a new Differential Revision, otherwise update
1028 revid. If parentrevphid is not None, set it as a dependency.
1029 revid. If parentrevphid is not None, set it as a dependency.
1029
1030
1031 If there is a single commit for the new Differential Revision, ``ctxs`` will
1032 be a list of that single context. Otherwise, it is a list that covers the
1033 range of changes for the differential, where ``ctxs[0]`` is the first change
1034 to include and ``ctxs[-1]`` is the last.
1035
1030 If oldnode is not None, check if the patch content (without commit message
1036 If oldnode is not None, check if the patch content (without commit message
1031 and metadata) has changed before creating another diff.
1037 and metadata) has changed before creating another diff. For a Revision with
1038 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1039 Revision covering multiple commits, ``oldbasenode`` corresponds to
1040 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1041 corresponds to ``ctxs[-1]``.
1032
1042
1033 If actions is not None, they will be appended to the transaction.
1043 If actions is not None, they will be appended to the transaction.
1034 """
1044 """
1035 basectx = ctx
1045 ctx = ctxs[-1]
1046 basectx = ctxs[0]
1047
1036 repo = ctx.repo()
1048 repo = ctx.repo()
1037 if oldnode:
1049 if oldnode:
1038 diffopts = mdiff.diffopts(git=True, context=32767)
1050 diffopts = mdiff.diffopts(git=True, context=32767)
1039 oldctx = repo.unfiltered()[oldnode]
1051 unfi = repo.unfiltered()
1040 oldbasectx = oldctx
1052 oldctx = unfi[oldnode]
1053 oldbasectx = unfi[oldbasenode]
1041 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1054 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1042 oldbasectx, oldctx, diffopts
1055 oldbasectx, oldctx, diffopts
1043 )
1056 )
1044 else:
1057 else:
1045 neednewdiff = True
1058 neednewdiff = True
1046
1059
1047 transactions = []
1060 transactions = []
1048 if neednewdiff:
1061 if neednewdiff:
1049 diff = creatediff(basectx, ctx)
1062 diff = creatediff(basectx, ctx)
1050 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1063 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1051 if comment:
1064 if comment:
1052 transactions.append({b'type': b'comment', b'value': comment})
1065 transactions.append({b'type': b'comment', b'value': comment})
1053 else:
1066 else:
1054 # Even if we don't need to upload a new diff because the patch content
1067 # Even if we don't need to upload a new diff because the patch content
1055 # does not change. We might still need to update its metadata so
1068 # does not change. We might still need to update its metadata so
1056 # pushers could know the correct node metadata.
1069 # pushers could know the correct node metadata.
1057 assert olddiff
1070 assert olddiff
1058 diff = olddiff
1071 diff = olddiff
1059 writediffproperties([ctx], diff)
1072 writediffproperties(ctxs, diff)
1060
1073
1061 # Set the parent Revision every time, so commit re-ordering is picked-up
1074 # Set the parent Revision every time, so commit re-ordering is picked-up
1062 if parentrevphid:
1075 if parentrevphid:
1063 transactions.append(
1076 transactions.append(
1064 {b'type': b'parents.set', b'value': [parentrevphid]}
1077 {b'type': b'parents.set', b'value': [parentrevphid]}
1065 )
1078 )
1066
1079
1067 if actions:
1080 if actions:
1068 transactions += actions
1081 transactions += actions
1069
1082
1070 # When folding multiple local commits into a single review, arcanist will
1083 # When folding multiple local commits into a single review, arcanist will
1071 # take the summary line of the first commit as the title, and then
1084 # take the summary line of the first commit as the title, and then
1072 # concatenate the rest of the remaining messages (including each of their
1085 # concatenate the rest of the remaining messages (including each of their
1073 # first lines) to the rest of the first commit message (each separated by
1086 # first lines) to the rest of the first commit message (each separated by
1074 # an empty line), and use that as the summary field. Do the same here.
1087 # an empty line), and use that as the summary field. Do the same here.
1075 # For commits with only a one line message, there is no summary field, as
1088 # For commits with only a one line message, there is no summary field, as
1076 # this gets assigned to the title.
1089 # this gets assigned to the title.
1077 fields = util.sortdict() # sorted for stable wire protocol in tests
1090 fields = util.sortdict() # sorted for stable wire protocol in tests
1078
1091
1079 for i, _ctx in enumerate([ctx]):
1092 for i, _ctx in enumerate(ctxs):
1080 # Parse commit message and update related fields.
1093 # Parse commit message and update related fields.
1081 desc = _ctx.description()
1094 desc = _ctx.description()
1082 info = callconduit(
1095 info = callconduit(
1083 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1096 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1084 )
1097 )
1085
1098
1086 for k in [b'title', b'summary', b'testPlan']:
1099 for k in [b'title', b'summary', b'testPlan']:
1087 v = info[b'fields'].get(k)
1100 v = info[b'fields'].get(k)
1088 if not v:
1101 if not v:
1089 continue
1102 continue
1090
1103
1091 if i == 0:
1104 if i == 0:
1092 # Title, summary and test plan (if present) are taken verbatim
1105 # Title, summary and test plan (if present) are taken verbatim
1093 # for the first commit.
1106 # for the first commit.
1094 fields[k] = v.rstrip()
1107 fields[k] = v.rstrip()
1095 continue
1108 continue
1096 elif k == b'title':
1109 elif k == b'title':
1097 # Add subsequent titles (i.e. the first line of the commit
1110 # Add subsequent titles (i.e. the first line of the commit
1098 # message) back to the summary.
1111 # message) back to the summary.
1099 k = b'summary'
1112 k = b'summary'
1100
1113
1101 # Append any current field to the existing composite field
1114 # Append any current field to the existing composite field
1102 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1115 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1103
1116
1104 for k, v in fields.items():
1117 for k, v in fields.items():
1105 transactions.append({b'type': k, b'value': v})
1118 transactions.append({b'type': k, b'value': v})
1106
1119
1107 params = {b'transactions': transactions}
1120 params = {b'transactions': transactions}
1108 if revid is not None:
1121 if revid is not None:
1109 # Update an existing Differential Revision
1122 # Update an existing Differential Revision
1110 params[b'objectIdentifier'] = revid
1123 params[b'objectIdentifier'] = revid
1111
1124
1112 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1125 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1113 if not revision:
1126 if not revision:
1114 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1127 if len(ctxs) == 1:
1128 msg = _(b'cannot create revision for %s') % ctx
1129 else:
1130 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1131 raise error.Abort(msg)
1115
1132
1116 return revision, diff
1133 return revision, diff
1117
1134
1118
1135
def userphids(ui, names):
    """convert user names to PHIDs

    Aborts if any of the given names is unknown to the Phabricator server.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # The API does not treat an unknown username as an error, so detect any
    # names that were silently dropped from the result ourselves.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1134
1151
1135
1152
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Confirmation can come from either config or the command line flag.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Transactions applied to every Revision: add any requested reviewers
    # and blocking reviewers (the latter use the "blocking(PHID)" syntax).
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        # Each Revision currently covers a single commit, so the base of the
        # previously posted range is the same node as its tip.
        oldbasenode = oldnode
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "Dxx - action - node: summary"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite the changeset, mapping parents through any
                    # already-rewritten ancestors in this loop.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1349
1368
1350
1369
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1362
1381
1363
1382
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets about to be sent and prompt for confirmation

    Returns True when the user accepts, False when they decline.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        # oldmap values are (oldnode, olddiff, drevid) triples
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns 0 for the first (Yes) choice
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1391
1410
1392
1411
# Revision status names in the normalized form produced by _getstatusname()
# (lowercased, spaces removed) that the drev query language recognizes.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1401
1420
1402
1421
1403 def _getstatusname(drev):
1422 def _getstatusname(drev):
1404 """get normalized status name from a Differential Revision"""
1423 """get normalized status name from a Differential Revision"""
1405 return drev[b'statusName'].replace(b' ', b'').lower()
1424 return drev[b'statusName'].replace(b' ', b'').lower()
1406
1425
1407
1426
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by mercurial.parser: higher binding strength binds
# tighter; b'(' opens a group closed by b')', b':' is the prefix "ancestors"
# operator, and &/+/- are the infix set operators.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1422
1441
1423
1442
def _tokenize(text):
    """tokenize ``text`` into (token-type, value, position) triples

    Runs of non-special characters are yielded as (b'symbol', name, pos).
    Each special character except a space yields (char, None, pos); spaces
    only separate tokens.  A final (b'end', None, pos) terminates the stream.
    """
    special = b'():+-& '
    length = len(text)
    pos = 0
    while pos < length:
        # Scan forward to the end of the current symbol, if any.
        end = pos
        while end < length and text[end : end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:  # special char, ignore space
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1443
1462
1444
1463
def _parse(text):
    """parse a drev spec into a syntax tree, aborting on trailing garbage"""
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos == len(text):
        return tree
    # The parser stopped before consuming all input: report the offset.
    raise error.ParseError(b'invalid token', pos)
1450
1469
1451
1470
1452 def _parsedrev(symbol):
1471 def _parsedrev(symbol):
1453 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1472 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1454 if symbol.startswith(b'D') and symbol[1:].isdigit():
1473 if symbol.startswith(b'D') and symbol[1:].isdigit():
1455 return int(symbol[1:])
1474 return int(symbol[1:])
1456 if symbol.isdigit():
1475 if symbol.isdigit():
1457 return int(symbol)
1476 return int(symbol)
1458
1477
1459
1478
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        # A leaf: record the drev id if the symbol names one.
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # b':X' — X itself and everything it transitively depends on.
        r, a = _prefetchdrevs(tree[1])
        drevs |= r
        ancestordrevs |= r
        ancestordrevs |= a
    else:
        # Any other operator: union the results of all operands.
        for subtree in tree[1:]:
            r, a = _prefetchdrevs(subtree)
            drevs |= r
            ancestordrevs |= a
    return drevs, ancestordrevs
1480
1499
1481
1500
1482 def querydrev(ui, spec):
1501 def querydrev(ui, spec):
1483 """return a list of "Differential Revision" dicts
1502 """return a list of "Differential Revision" dicts
1484
1503
1485 spec is a string using a simple query language, see docstring in phabread
1504 spec is a string using a simple query language, see docstring in phabread
1486 for details.
1505 for details.
1487
1506
1488 A "Differential Revision dict" looks like:
1507 A "Differential Revision dict" looks like:
1489
1508
1490 {
1509 {
1491 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1510 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1492 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1511 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1493 "auxiliary": {
1512 "auxiliary": {
1494 "phabricator:depends-on": [
1513 "phabricator:depends-on": [
1495 "PHID-DREV-gbapp366kutjebt7agcd"
1514 "PHID-DREV-gbapp366kutjebt7agcd"
1496 ]
1515 ]
1497 "phabricator:projects": [],
1516 "phabricator:projects": [],
1498 },
1517 },
1499 "branch": "default",
1518 "branch": "default",
1500 "ccs": [],
1519 "ccs": [],
1501 "commits": [],
1520 "commits": [],
1502 "dateCreated": "1499181406",
1521 "dateCreated": "1499181406",
1503 "dateModified": "1499182103",
1522 "dateModified": "1499182103",
1504 "diffs": [
1523 "diffs": [
1505 "3",
1524 "3",
1506 "4",
1525 "4",
1507 ],
1526 ],
1508 "hashes": [],
1527 "hashes": [],
1509 "id": "2",
1528 "id": "2",
1510 "lineCount": "2",
1529 "lineCount": "2",
1511 "phid": "PHID-DREV-672qvysjcczopag46qty",
1530 "phid": "PHID-DREV-672qvysjcczopag46qty",
1512 "properties": {},
1531 "properties": {},
1513 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1532 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1514 "reviewers": [],
1533 "reviewers": [],
1515 "sourcePath": null
1534 "sourcePath": null
1516 "status": "0",
1535 "status": "0",
1517 "statusName": "Needs Review",
1536 "statusName": "Needs Review",
1518 "summary": "",
1537 "summary": "",
1519 "testPlan": "",
1538 "testPlan": "",
1520 "title": "example",
1539 "title": "example",
1521 "uri": "https://phab.example.com/D2",
1540 "uri": "https://phab.example.com/D2",
1522 }
1541 }
1523 """
1542 """
1524 # TODO: replace differential.query and differential.querydiffs with
1543 # TODO: replace differential.query and differential.querydiffs with
1525 # differential.diff.search because the former (and their output) are
1544 # differential.diff.search because the former (and their output) are
1526 # frozen, and planned to be deprecated and removed.
1545 # frozen, and planned to be deprecated and removed.
1527
1546
1528 def fetch(params):
1547 def fetch(params):
1529 """params -> single drev or None"""
1548 """params -> single drev or None"""
1530 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1549 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1531 if key in prefetched:
1550 if key in prefetched:
1532 return prefetched[key]
1551 return prefetched[key]
1533 drevs = callconduit(ui, b'differential.query', params)
1552 drevs = callconduit(ui, b'differential.query', params)
1534 # Fill prefetched with the result
1553 # Fill prefetched with the result
1535 for drev in drevs:
1554 for drev in drevs:
1536 prefetched[drev[b'phid']] = drev
1555 prefetched[drev[b'phid']] = drev
1537 prefetched[int(drev[b'id'])] = drev
1556 prefetched[int(drev[b'id'])] = drev
1538 if key not in prefetched:
1557 if key not in prefetched:
1539 raise error.Abort(
1558 raise error.Abort(
1540 _(b'cannot get Differential Revision %r') % params
1559 _(b'cannot get Differential Revision %r') % params
1541 )
1560 )
1542 return prefetched[key]
1561 return prefetched[key]
1543
1562
def getstack(topdrevids):
    """given a top, get a stack from the bottom, [id] -> [id]"""
    seen = set()
    ordered = []  # collected top-to-bottom; reversed before returning
    pending = [{b'ids': [i]} for i in topdrevids]
    while pending:
        drev = fetch(pending.pop())
        if drev[b'id'] in seen:
            continue
        seen.add(drev[b'id'])
        ordered.append(int(drev[b'id']))
        # Follow the dependency edges towards the bottom of the stack.
        deps = drev.get(b'auxiliary', {}).get(b'phabricator:depends-on', [])
        pending.extend({b'phids': [phid]} for phid in deps)
    return smartset.baseset(ordered[::-1])
1562
1581
# Initialize prefetch cache
prefetched = {}  # {id or phid: drev}

tree = _parse(spec)
drevs, ancestordrevs = _prefetchdrevs(tree)

# developer config: phabricator.batchsize
batchsize = ui.configint(b'phabricator', b'batchsize')

# Prefetch Differential Revisions in batch
tofetch = set(drevs)
for r in ancestordrevs:
    # Guess a window of ``batchsize`` revision ids below each ancestor so
    # the stack walk in getstack() mostly hits the prefetch cache.
    tofetch.update(range(max(1, r - batchsize), r + 1))
if drevs:
    # One conduit round-trip populates the cache for all guessed ids.
    fetch({b'ids': list(tofetch)})
validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1579
1598
# Walk through the tree, return smartsets
def walk(tree):
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            # A concrete revision reference like ``D123`` or ``123``.
            return smartset.baseset([drev])
        elif tree[1] in _knownstatusnames:
            # A status name selects the prefetched revisions in that state.
            drevs = [
                r
                for r in validids
                if _getstatusname(prefetched[r]) == tree[1]
            ]
            return smartset.baseset(drevs)
        else:
            raise error.Abort(_(b'unknown symbol: %s') % tree[1])
    elif op in {b'and_', b'add', b'sub'}:
        assert len(tree) == 3
        # Map the operator name onto the matching set operation
        # (intersection, union, difference) on the walked operands.
        # NOTE(review): ``op`` is bytes; this relies on the file-level
        # ``getattr`` being mercurial's pycompat wrapper that accepts
        # bytes attribute names -- confirm against the file header.
        return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
    elif op == b'group':
        return walk(tree[1])
    elif op == b'ancestors':
        return getstack(walk(tree[1]))
    else:
        raise error.ProgrammingError(b'illegal tree: %r' % tree)

return [prefetched[r] for r in walk(tree)]
1607
1626
1608
1627
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    # Empty fields are dropped so the message has no blank sections.
    fields = (
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    )
    return b'\n\n'.join(f for f in fields if f)
1622
1641
1623
1642
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    desc = ctx.description()

    # The commit messages were combined on Phabricator when posting with
    # --fold, so reading the fields back would give *every* folded commit
    # the same message.  Instead, keep the local description and only add
    # (or refresh) the "Differential Revision:" line.  The trade-off is
    # that message/test-plan edits made on Phabricator are not propagated
    # back, which is reasonable when local commits are effectively
    # combined in one review.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)
    return b'\n\n'.join([desc, uri])
1649
1668
1650
1669
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        if props.get(b'local:commits'):
            # NOTE(review): sorted() compares the commit dicts themselves;
            # with more than one local commit this can raise TypeError on
            # Python 3 -- confirm whether multiple entries occur here.
            commit = sorted(props[b'local:commits'].values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" carries a bare epoch; assume offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1718
1737
1719
1738
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _formatspec(s):
            # ``:(spec)`` additionally pulls in the stack below the spec.
            wrapped = b':(%s)' % s if stack else s
            return b'(%s)' % wrapped

        # Multiple specs are evaluated individually and unioned.
        spec = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    # Reached with no specs at all, or when the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
1739
1758
1740
1759
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs in one conduit call.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate a patch for each drev, always using its latest diff.
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property.  Write hg
        # patch headers that can be read by the "import" command.  See
        # patchheadermap and extract in mercurial/patch.py for supported
        # headers.
        headerlines = [b'# HG changeset patch']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))
        header = b'\n'.join(headerlines) + b'\n'

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand the finished patches to the supplied callback.
    write(patches)
1777
1796
1778
1797
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # Emit each generated patch to the ui, in the order readpatch built them.
    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1815
1834
1816
1835
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Start applying at the parents of the working directory; each
        # successfully imported node becomes the parent of the next patch,
        # so the drevs are stacked in the order readpatch delivered them.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1876
1895
1877
1896
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive; at most one may be given.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # --comment is only attached to the last revision in the set.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1915
1934
1916
1935
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision:" line embedded in the description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    # Otherwise fall back to a local D* tag, building the URL from config.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
1939
1958
1940
1959
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # Query the single revision and pick the matching result, if any.
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) != drevid:
            continue
        return templateutil.hybriddict(
            {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
        )
    return None
1961
1980
1962
1981
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition the revisions: those with a known drev id get grouped by
    # id; the rest are dropped from the graph below.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit call for all ids, then invert the mapping to rev -> drev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook called by the displayer for each changeset: print the
        # review URL and a status label colored per its status name.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    # NOTE(review): reaches into the displayer's private ``_exthook`` --
    # confirm this stays in sync with mercurial's show/logcmdutil internals.
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now