##// END OF EJS Templates
phabricator: combine commit messages into the review when folding commits...
Matt Harbison -
r45134:dbe9182c default
parent child Browse files
Show More
@@ -1,1948 +1,2003 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# color/effect labels used by the phabricator templates
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# extra flag added to every command registered through vcrcommand()
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162
162
163
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    loaded = False
    params = {}

    try:
        # json.loads only accepts str from 3.6+, and only emits str
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        tobytes = (
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x
        )
        params = pycompat.rapply(tobytes, pycompat.json_loads(raw))
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory; nothing to overlay
        pass

    overrides = util.sortdict()

    if b"repository.callsign" in params:
        overrides[(b"phabricator", b"callsign")] = params[
            b"repository.callsign"
        ]

    if b"phabricator.uri" in params:
        overrides[(b"phabricator", b"url")] = params[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Load .hg/hgrc as well; report True when either source supplied config
    return orig(ui, wdirvfs, hgvfs, requirements) or loaded
200
200
201
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command, adding the hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is passed, HTTP traffic is recorded to (or
    replayed from) the vcr cassette at PATH instead of going over the
    network, which is how the Phabricator commands are tested.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method, and logical parameters agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key, values in r1params.items():
            if key not in r2params:
                return False
            value = values[0]
            # compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # never record a real conduit API token in a cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # don't persist session cookies into the cassette either
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if not cassette:
                return fn(*args, **kwargs)
            import hgdemandimport

            # vcr is a test-only dependency; import it lazily and without
            # demandimport, which interferes with its own imports
            with hgdemandimport.deactivated():
                import vcr as vcrmod
                import vcr.stubs as stubs

                vcr = vcrmod.VCR(
                    serializer='json',
                    before_record_request=sanitiserequest,
                    before_record_response=sanitiseresponse,
                    custom_patches=[
                        (
                            urlmod,
                            'httpconnection',
                            stubs.VCRHTTPConnection,
                        ),
                        (
                            urlmod,
                            'httpsconnection',
                            stubs.VCRHTTPSConnection,
                        ),
                    ],
                )
                vcr.register_matcher('hgmatcher', hgmatcher)
                with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                    return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280
280
281
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # PHP form encoding spells booleans as strings
            value = b'true' if value else b'false'
        # exact-type dispatch (not isinstance), matching PHP semantics:
        # subclasses of list/dict are treated as scalar leaves
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, v) for i, v in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for k, v in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, k), v)
            else:
                flatten(k, v)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
307
307
308
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)

    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337
337
338
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # shell out to the user-configured curl command
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # use the builtin HTTP library
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # json.loads only accepts str from py3.6+, and only emits str
    tobytes = (
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x
    )
    parsed = pycompat.rapply(
        tobytes, pycompat.json_loads(encoding.unifromlocal(body))
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
382
382
383
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts str from 3.6+, and only returns str
    rawparams = encoding.unifromlocal(ui.fin.read())
    tobytes = (
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x
    )
    params = pycompat.rapply(tobytes, pycompat.json_loads(rawparams))
    # json.dumps only accepts str
    tounicode = (
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x
    )
    result = pycompat.rapply(tounicode, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407
407
408
408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # cache the answer in the config for the rest of this process
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428
428
429
429
# local tag of the form "D123" pointing at a previously-sent changeset
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# "Differential Revision: <url>D123" trailer in a commit message
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434
434
435
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))

        # Prefer a "D123" local tag on any predecessor still in the repo;
        # such tags are verified against Phabricator below (force=0).
        drev = None
        for n in precnodes:
            if not has_node(n):
                continue
            for tag in unfi.nodetags(n):
                m = _differentialrevisiontagre.match(tag)
                if m:
                    drev = int(m.group(1))
                    break
            if drev is not None:
                break  # found a tag, stop scanning predecessors
        if drev is not None:
            toconfirm[node] = (0, set(precnodes), drev)
        else:
            # Fall back to the commit message trailer (force=1: trusted)
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnode(d):
            # node recorded in the diff's metadata by a previous phabsend
            return bin(getdiffmeta(d).get(b'node', b'')) or None

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not (phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529
529
530
530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    result = dict.fromkeys(revs)  # default every rev to None
    for rev in revs:
        ctx = repo[rev]
        # the commit message trailer wins ...
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # ... otherwise fall back to a local "D123" tag on the changeset
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result
552
552
553
553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    repo = ctx.repo()
    basenode = basectx.p1().node()
    for chunk, _label in patch.diffui(repo, basenode, ctx.node(), None, opts=diffopts):
        buf.write(chunk)
    return buf.getvalue()
562
562
563
563
class DiffChangeType(object):
    """Integer constants describing how a file changed in a Differential diff.

    The values are sent verbatim in the ``type`` field of a phabchange;
    they presumably mirror Arcanist's change-type enumeration — confirm
    against the Phabricator/Arcanist sources before changing.
    """

    ADD = 1  # brand-new file
    CHANGE = 2  # content modified in place (default)
    DELETE = 3  # file removed
    MOVE_AWAY = 4  # source side of a rename
    COPY_AWAY = 5  # source side of a copy
    MOVE_HERE = 6  # destination side of a rename
    COPY_HERE = 7  # destination side of a copy
    MULTICOPY = 8  # source copied/moved to multiple destinations
573
573
574
574
class DiffFileType(object):
    """Integer constants for the kind of file a phabchange describes.

    Sent verbatim in the ``fileType`` field of a phabchange.
    """

    TEXT = 1  # diffable text; hunks are attached
    IMAGE = 2  # binary with an image mime-type (web UI may render it)
    BINARY = 3  # any other binary content
579
579
580
580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names use camelCase because the attrs field names are
    serialized as-is into the Conduit payload (via ``attr.asdict`` in
    ``phabchange.addhunk``); the declaration order is also the positional
    argument order, so do not reorder fields.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594
594
595
595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Fields marked "camelcase-required" are serialized by name into the
    Conduit payload (see ``phabdiff.addchange``), so they must keep their
    camelCase spelling.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key under ``old:``."""
        # list() because we mutate the dict while iterating its keys
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (git-style, e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (git-style, e.g. b'100644')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a ``phabhunk`` and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635
635
636
636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Serialized wholesale via ``attr.asdict`` in ``creatediff``; fields
    marked "camelcase-required" must keep their camelCase names.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a ``phabchange``, keyed by the file's current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663
663
664
664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A very large context keeps the whole file in each hunk.
    opts = mdiff.diffopts(git=True, context=32767)
    diffiter = patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    _pfctx, _fctx, header, hunks = next(diffiter)

    for ranges, lines in hunks:
        oldoff, oldlen, newoff, newlen = ranges
        # The first line is excluded from the corpus; the offsets above
        # carry the range information instead.
        body = b''.join(lines[1:])
        statinput = list(header)
        statinput.extend(lines)
        statsum = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statinput))
        )
        _mf, _mt, added, deleted, _hb = statsum
        pchange.addhunk(
            phabhunk(oldoff, oldlen, newoff, newlen, body, added, deleted)
        )
693
693
694
694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progressargs = {
        'unit': _(b'chunks'),
        'total': len(chunks),
    }
    with ui.makeprogress(_(b'uploading file chunks'), **progressargs) as progress:
        for chunk in chunks:
            progress.increment()
            # Chunks the server already holds are flagged complete.
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
720
720
721
721
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID plus upload=True means the server wants the content
            # delivered chunk by chunk.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757
757
758
758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        oldpath = encoding.unifromlocal(oldfctx.path())
        mimeguess, _enc = mimetypes.guess_type(oldpath)
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
782
782
783
783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    path = encoding.unifromlocal(fctx.path())
    mimeguess, _enc = mimetypes.guess_type(path)
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    # Images get a dedicated type so the web UI can render them inline.
    if mimeguess.startswith(b'image/'):
        pchange.fileType = DiffFileType.IMAGE
796
796
797
797
# Copied from mercurial/patch.py
# Maps a file's flags string (b'l' symlink, b'x' executable, b'' regular)
# to the git-style octal mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800
800
801
801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    data = fctx.data()
    try:
        data.decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user why this file will be treated as binary.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815
815
816
816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or non-UTF-8) deletions carry no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829
829
830
830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        # Only record modes when the flags actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Short-circuit order matters: notutf8() prints a notice as a
        # side effect, so keep the original evaluation sequence.
        anybinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if anybinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855
855
856
856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed``: when an added file turns out to be the destination
    of a move, its source is dropped from ``removed`` (so ``addremoved``
    won't see it) and recorded here as a MOVE_AWAY change instead.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the source path: prefer pathcopies() over a multi-commit
        # range, fall back to per-file rename metadata for a single commit.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy. Claim the
                # source out of ``removed`` so addremoved() skips it.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source: promote
                # the source change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a notice as a side effect; evaluation order of
        # this condition is therefore significant.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Source-side changes are emitted after all destinations so that
    # MULTICOPY promotion and awayPaths accumulation are complete.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938
938
939
939
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid

    response = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not response:
        if basectx == ctx:
            msg = _(b'cannot create diff for %s') % ctx
        else:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)
    return response
969
969
970
970
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the overall patch: tip commit identity plus the
    # parent the whole stack applies on.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" records every constituent commit individually.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1014
1014
1015
1015
1016 def createdifferentialrevision(
1016 def createdifferentialrevision(
1017 ctx,
1017 ctx,
1018 revid=None,
1018 revid=None,
1019 parentrevphid=None,
1019 parentrevphid=None,
1020 oldnode=None,
1020 oldnode=None,
1021 olddiff=None,
1021 olddiff=None,
1022 actions=None,
1022 actions=None,
1023 comment=None,
1023 comment=None,
1024 ):
1024 ):
1025 """create or update a Differential Revision
1025 """create or update a Differential Revision
1026
1026
1027 If revid is None, create a new Differential Revision, otherwise update
1027 If revid is None, create a new Differential Revision, otherwise update
1028 revid. If parentrevphid is not None, set it as a dependency.
1028 revid. If parentrevphid is not None, set it as a dependency.
1029
1029
1030 If oldnode is not None, check if the patch content (without commit message
1030 If oldnode is not None, check if the patch content (without commit message
1031 and metadata) has changed before creating another diff.
1031 and metadata) has changed before creating another diff.
1032
1032
1033 If actions is not None, they will be appended to the transaction.
1033 If actions is not None, they will be appended to the transaction.
1034 """
1034 """
1035 basectx = ctx
1035 basectx = ctx
1036 repo = ctx.repo()
1036 repo = ctx.repo()
1037 if oldnode:
1037 if oldnode:
1038 diffopts = mdiff.diffopts(git=True, context=32767)
1038 diffopts = mdiff.diffopts(git=True, context=32767)
1039 oldctx = repo.unfiltered()[oldnode]
1039 oldctx = repo.unfiltered()[oldnode]
1040 oldbasectx = oldctx
1040 oldbasectx = oldctx
1041 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1041 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1042 oldbasectx, oldctx, diffopts
1042 oldbasectx, oldctx, diffopts
1043 )
1043 )
1044 else:
1044 else:
1045 neednewdiff = True
1045 neednewdiff = True
1046
1046
1047 transactions = []
1047 transactions = []
1048 if neednewdiff:
1048 if neednewdiff:
1049 diff = creatediff(basectx, ctx)
1049 diff = creatediff(basectx, ctx)
1050 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1050 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1051 if comment:
1051 if comment:
1052 transactions.append({b'type': b'comment', b'value': comment})
1052 transactions.append({b'type': b'comment', b'value': comment})
1053 else:
1053 else:
1054 # Even if we don't need to upload a new diff because the patch content
1054 # Even if we don't need to upload a new diff because the patch content
1055 # does not change. We might still need to update its metadata so
1055 # does not change. We might still need to update its metadata so
1056 # pushers could know the correct node metadata.
1056 # pushers could know the correct node metadata.
1057 assert olddiff
1057 assert olddiff
1058 diff = olddiff
1058 diff = olddiff
1059 writediffproperties([ctx], diff)
1059 writediffproperties([ctx], diff)
1060
1060
1061 # Set the parent Revision every time, so commit re-ordering is picked-up
1061 # Set the parent Revision every time, so commit re-ordering is picked-up
1062 if parentrevphid:
1062 if parentrevphid:
1063 transactions.append(
1063 transactions.append(
1064 {b'type': b'parents.set', b'value': [parentrevphid]}
1064 {b'type': b'parents.set', b'value': [parentrevphid]}
1065 )
1065 )
1066
1066
1067 if actions:
1067 if actions:
1068 transactions += actions
1068 transactions += actions
1069
1069
1070 # When folding multiple local commits into a single review, arcanist will
1071 # take the summary line of the first commit as the title, and then
1072 # concatenate the rest of the remaining messages (including each of their
1073 # first lines) to the rest of the first commit message (each separated by
1074 # an empty line), and use that as the summary field. Do the same here.
1075 # For commits with only a one line message, there is no summary field, as
1076 # this gets assigned to the title.
1077 fields = util.sortdict() # sorted for stable wire protocol in tests
1078
1079 for i, _ctx in enumerate([ctx]):
1070 # Parse commit message and update related fields.
1080 # Parse commit message and update related fields.
1071 desc = ctx.description()
1081 desc = _ctx.description()
1072 info = callconduit(
1082 info = callconduit(
1073 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1083 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1074 )
1084 )
1075 for k, v in info[b'fields'].items():
1085
1076 if k in [b'title', b'summary', b'testPlan']:
1086 for k in [b'title', b'summary', b'testPlan']:
1087 v = info[b'fields'].get(k)
1088 if not v:
1089 continue
1090
1091 if i == 0:
1092 # Title, summary and test plan (if present) are taken verbatim
1093 # for the first commit.
1094 fields[k] = v.rstrip()
1095 continue
1096 elif k == b'title':
1097 # Add subsequent titles (i.e. the first line of the commit
1098 # message) back to the summary.
1099 k = b'summary'
1100
1101 # Append any current field to the existing composite field
1102 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1103
1104 for k, v in fields.items():
1077 transactions.append({b'type': k, b'value': v})
1105 transactions.append({b'type': k, b'value': v})
1078
1106
1079 params = {b'transactions': transactions}
1107 params = {b'transactions': transactions}
1080 if revid is not None:
1108 if revid is not None:
1081 # Update an existing Differential Revision
1109 # Update an existing Differential Revision
1082 params[b'objectIdentifier'] = revid
1110 params[b'objectIdentifier'] = revid
1083
1111
1084 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1112 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1085 if not revision:
1113 if not revision:
1086 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1114 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1087
1115
1088 return revision, diff
1116 return revision, diff
1089
1117
1090
1118
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API does not treat an unknown username as an error, so detect
    # any requested names that came back unmatched and abort explicitly.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1106
1134
1107
1135
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm and not _confirmbeforesend(repo, revs, oldmap):
        raise error.Abort(_(b'phabsend cancelled'))

    # Optional reviewers/blockers become a single reviewers.add transaction
    # that is attached to every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs
    # and can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            action = b'updated' if revid else b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            found = _differentialrevisiondescre.search(ctx.description())
            if not found or int(found.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "D123 - created - 1234abcd: title"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite on top of any already-rewritten parents so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1321
1349
1322
1350
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1334
1362
1335
1363
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets to be sent and prompt the user for confirmation

    Prints one line per changeset, showing the existing Differential
    Revision id (or NEW) and the first line of the commit message.
    Returns True to proceed, False if the user declined.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for "Yes", so a zero answer means "send".
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1363
1391
1364
1392
1365 _knownstatusnames = {
1393 _knownstatusnames = {
1366 b'accepted',
1394 b'accepted',
1367 b'needsreview',
1395 b'needsreview',
1368 b'needsrevision',
1396 b'needsrevision',
1369 b'closed',
1397 b'closed',
1370 b'abandoned',
1398 b'abandoned',
1371 b'changesplanned',
1399 b'changesplanned',
1372 }
1400 }
1373
1401
1374
1402
1375 def _getstatusname(drev):
1403 def _getstatusname(drev):
1376 """get normalized status name from a Differential Revision"""
1404 """get normalized status name from a Differential Revision"""
1377 return drev[b'statusName'].replace(b' ', b'').lower()
1405 return drev[b'statusName'].replace(b' ', b'').lower()
1378
1406
1379
1407
1380 # Small language to specify differential revisions. Support symbols: (), :X,
1408 # Small language to specify differential revisions. Support symbols: (), :X,
1381 # +, and -.
1409 # +, and -.
1382
1410
1383 _elements = {
1411 _elements = {
1384 # token-type: binding-strength, primary, prefix, infix, suffix
1412 # token-type: binding-strength, primary, prefix, infix, suffix
1385 b'(': (12, None, (b'group', 1, b')'), None, None),
1413 b'(': (12, None, (b'group', 1, b')'), None, None),
1386 b':': (8, None, (b'ancestors', 8), None, None),
1414 b':': (8, None, (b'ancestors', 8), None, None),
1387 b'&': (5, None, None, (b'and_', 5), None),
1415 b'&': (5, None, None, (b'and_', 5), None),
1388 b'+': (4, None, None, (b'add', 4), None),
1416 b'+': (4, None, None, (b'add', 4), None),
1389 b'-': (4, None, None, (b'sub', 4), None),
1417 b'-': (4, None, None, (b'sub', 4), None),
1390 b')': (0, None, None, None, None),
1418 b')': (0, None, None, None, None),
1391 b'symbol': (0, b'symbol', None, None, None),
1419 b'symbol': (0, b'symbol', None, None, None),
1392 b'end': (0, None, None, None, None),
1420 b'end': (0, None, None, None, None),
1393 }
1421 }
1394
1422
1395
1423
def _tokenize(text):
    """yield (token-type, value, position) triples for a drev spec

    Maximal runs of non-special bytes are emitted as ``symbol`` tokens;
    each special character is emitted as its own token type (spaces are
    silently skipped), and a final ``end`` token marks exhaustion.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    i = 0
    n = len(text)
    while i < n:
        word = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[i:])
            )
        )
        if not word:
            # A special character; spaces are dropped entirely.
            ch = text[i : i + 1]
            if ch != b' ':
                yield (ch, None, i)
            i += 1
        else:
            yield (b'symbol', word, i)
            i += len(word)
    yield (b'end', None, i)
1415
1443
1416
1444
def _parse(text):
    """parse a drev spec into a tree, aborting on trailing garbage"""
    tree, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1422
1450
1423
1451
1424 def _parsedrev(symbol):
1452 def _parsedrev(symbol):
1425 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1453 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1426 if symbol.startswith(b'D') and symbol[1:].isdigit():
1454 if symbol.startswith(b'D') and symbol[1:].isdigit():
1427 return int(symbol[1:])
1455 return int(symbol[1:])
1428 if symbol.isdigit():
1456 if symbol.isdigit():
1429 return int(symbol)
1457 return int(symbol)
1430
1458
1431
1459
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        num = _parsedrev(tree[1])
        if num:
            singles.add(num)
    elif kind == b'ancestors':
        # The operand's ids are needed both directly and as stack tops.
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles.update(sub_singles)
        ancestors.update(sub_singles)
        ancestors.update(sub_ancestors)
    else:
        # Binary operators and groups: merge results from all operands.
        for subtree in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(subtree)
            singles.update(sub_singles)
            ancestors.update(sub_ancestors)
    return singles, ancestors
1452
1480
1453
1481
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    Each returned dict is a raw ``differential.query`` result describing one
    Differential Revision, keyed by fields such as ``id``, ``phid``,
    ``title``, ``summary``, ``testPlan``, ``statusName``, ``diffs``,
    ``auxiliary`` and ``uri``.
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in drevcache:
            return drevcache[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Cache every fetched revision under both its integer id and phid.
        for drev in drevs:
            drevcache[drev[b'phid']] = drev
            drevcache[int(drev[b'id'])] = drev
        if key not in drevcache:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return drevcache[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        seen = set()
        stack = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            drev = fetch(queue.pop())
            if drev[b'id'] in seen:
                continue
            seen.add(drev[b'id'])
            stack.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            for phid in auxiliary.get(b'phabricator:depends-on', []):
                queue.append({b'phids': [phid]})
        stack.reverse()  # bottom-up order
        return smartset.baseset(stack)

    # {integer id or phid: drev} cache shared by fetch()/getstack()/walk()
    drevcache = {}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status name selects every prefetched drev in that state.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(drevcache[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [drevcache[r] for r in walk(tree)]
1579
1607
1580
1608
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    # Assemble the sections in their canonical order; empty sections are
    # dropped so the joined message has no blank paragraphs.
    sections = [drev[b'title'], drev[b'summary'].rstrip()]

    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)

    sections.append(b'Differential Revision: %s' % drev[b'uri'])

    return b'\n\n'.join(s for s in sections if s)
1594
1622
1595
1623
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    desc = ctx.description()

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)

    return b'\n\n'.join([desc, uri])
1649
1650
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # When several commits are attached (e.g. a folded review), pick
            # a deterministic one.  Sorting the commit dicts themselves
            # raises TypeError on Python 3, so order by the local revision
            # number instead (lexicographic on the byte string; adequate for
            # a deterministic choice).
            commit = sorted(
                props[b'local:commits'].values(),
                key=lambda c: c.get(b'local', b''),
            )[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc only records seconds; time zone information is lost
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything the commit didn't supply.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1663
1718
1664
1719
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _formatspec(s):
            # Wrap each spec in parens; ``:`` expands it to the whole stack.
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        joined = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, joined)
        if drevs:
            return drevs

    # Either no specs were supplied, or the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
1684
1739
1685
1740
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1722
1777
1723
1778
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Dump each patch body to stdout; the drev id is unused here.
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
1760
1815
1761
1816
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone();
    # when enabled it is handled by evolve wrapping tryimportone().
    opts[b'obsolete'] = ui.configbool(b'phabimport', b'obsolete')

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True

    def _apply(patches):
        # Start from the wdir parent(s) and chain each imported patch onto
        # the previously imported node, all inside one transaction.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
1821
1876
1822
1877
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # The comment (if any) is attached only to the last revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1860
1915
1861
1916
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Prefer the "Differential Revision:" line amended into the description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )

    # Otherwise fall back to a D* tag on the node, building the URL from
    # the configured Phabricator base URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(t):
            url = ctx.repo().ui.config(b'phabricator', b'url')
            if not url.endswith(b'/'):
                url += b'/'
            return templateutil.hybriddict({b'url': url + t, b'id': t,})

    return None
1884
1939
1885
1940
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # No Differential Revision is associated with this changeset.
        return None

    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    match = next((d for d in drevs if int(d[b'id']) == drevid), None)
    if match is None:
        return None
    return templateutil.hybriddict(
        {b'url': match[b'uri'], b'status': match[b'statusName'],}
    )
1906
1961
1907
1962
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition revs into those with a known drev id and those without,
    # remembering which revs map to each drev id (several revs may share one).
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One batched query for all drevs, then index the results back by rev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked per changeset by the displayer: print URI + status.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now