##// END OF EJS Templates
phabricator: remove *-argument from _getdrevs()...
Yuya Nishihara -
r45076:2d63a891 default
parent child Browse files
Show More
@@ -1,1918 +1,1918 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 localrepo,
69 localrepo,
70 logcmdutil,
70 logcmdutil,
71 match,
71 match,
72 mdiff,
72 mdiff,
73 obsutil,
73 obsutil,
74 parser,
74 parser,
75 patch,
75 patch,
76 phases,
76 phases,
77 pycompat,
77 pycompat,
78 scmutil,
78 scmutil,
79 smartset,
79 smartset,
80 tags,
80 tags,
81 templatefilters,
81 templatefilters,
82 templateutil,
82 templateutil,
83 url as urlmod,
83 url as urlmod,
84 util,
84 util,
85 )
85 )
86 from mercurial.utils import (
86 from mercurial.utils import (
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90 from . import show
90 from . import show
91
91
92
92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points under the names the
# extension-loading machinery looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels applied to the command output (see the
# ``phabricator.*`` labels used by the commands in this extension).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra command-line flag appended to every command declared through
# ``vcrcommand`` below; used by the test suite to record/replay HTTP.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
161
161
162
162
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.
    """
    loaded = False
    arcparams = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings; convert back to local
        arcparams = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory; nothing to load
        pass

    overrides = util.sortdict()
    # Map recognized .arcconfig keys onto [phabricator] config entries.
    for arckey, cfgkey in [
        (b"repository.callsign", b"callsign"),
        (b"phabricator.uri", b"url"),
    ]:
        if arckey in arcparams:
            overrides[(b"phabricator", cfgkey)] = arcparams[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Still chain to the original loader for .hg/hgrc
    return orig(ui, wdirvfs, hgvfs, requirements) or loaded
199
199
200
200
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Declare a command like @command, adding a ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is given, HTTP traffic made through
    ``mercurial.url`` is recorded to (or replayed from) the vcr cassette at
    PATH, so tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: requests match when URI, method and form
        # parameters agree; JSON-valued parameters are compared structurally
        # so key ordering does not cause spurious mismatches.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token before it is written to the cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are environment-specific; keep them out of the
        # recorded transcript.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # test_vcr is the popped --test-vcr flag value (empty when unset)
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does import-time magic that fights demandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # depth=2 so checksignature reports against the wrapped function
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
279
279
280
280
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        # Exact-type dispatch (not isinstance) so dict/list subclasses are
        # treated as leaf values, matching the original dispatch-table form.
        if type(obj) is list:
            pairs = [(b'%d' % idx, elem) for idx, elem in enumerate(obj)]
        elif type(obj) is dict:
            pairs = obj.items()
        else:
            # Leaf value: record it under the accumulated key prefix.
            flatparams[prefix] = obj
            return
        for key, value in pairs:
            if prefix:
                process(b'%s[%s]' % (prefix, key), value)
            else:
                process(key, value)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
306
306
307
307
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up matching [auth] credentials for the configured URL.
    match_ = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if match_:
        group, auth = match_
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
336
336
337
337
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``); the
    call aborts with the server-provided message on a Conduit error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before injecting the token so the caller's dict is not mutated.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form data into the user-configured curl command
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default: use the builtin HTTP machinery
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert all unicode strings in the decoded JSON back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
381
381
382
382
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    # json.loads only returns unicode strings
    tolocal = lambda x: (
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x
    )
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))

    # json.dumps only accepts unicode strings
    fromlocal = lambda x: (
        encoding.unifromlocal(x) if isinstance(x, bytes) else x
    )
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))

    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
406
406
407
407
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None

    repophid = matches[0][b'phid']
    # Remember the answer so later calls skip the Conduit round-trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
427
427
428
428
# A local tag of the exact form "D123" marking a previously-sent changeset.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# A "Differential Revision: <url>D<id>" line in a commit message; the
# ``id`` group captures the numeric Differential Revision ID.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
433
433
434
434
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a D* tag (for-else fires when the loop
            # above completed without break); fall back to the commit message.
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is authoritative, skip the
                # precursor-overlap sanity check below
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # tagging nullid removes the stale local "D*" tag
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
528
528
529
529
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevof(ctx):
        # Prefer the "Differential Revision:" line in the commit message.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        # Otherwise fall back to a local tag of the form "D123".
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: _drevof(repo[rev]) for rev in revs}
551
551
552
552
def getdiff(ctx, diffopts):
    """Return the plain-text diff of ``ctx`` against its first parent.

    No header information (user, commit message, etc.) is included.
    """
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
561
561
562
562
class DiffChangeType(object):
    """Numeric change-type constants understood by Phabricator's
    differential API (one per kind of per-file change in a diff).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    # A file that was moved away and then copied to additional destinations.
    MULTICOPY = 8
572
572
573
573
class DiffFileType(object):
    """Numeric file-type constants understood by Phabricator's
    differential API.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
578
578
579
579
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    The offset/length attributes locate the hunk in the old and new file;
    ``corpus`` holds the hunk body without the "@@ ... @@" header line.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
593
593
594
594
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode in the old properties."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode in the new properties."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a ``phabhunk`` and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
634
634
635
635
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a ``phabchange`` for this diff, keyed by its path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
662
662
663
663
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    # The matcher selects exactly one file, so there is exactly one entry.
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=diffopts)
    )

    for ranges, lines in hunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # Drop the "@@ ... @@" line; offsets are transmitted separately.
        corpus = b''.join(lines[1:])
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset=oldOffset,
                oldLength=oldLength,
                newOffset=newOffset,
                newLength=newLength,
                corpus=corpus,
                addLines=addLines,
                delLines=delLines,
            )
        )
692
692
693
693
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server reports as already uploaded.
            if not chunk[b'complete']:
                start = int(chunk[b'byteStart'])
                end = int(chunk[b'byteEnd'])
                callconduit(
                    ui,
                    b'file.uploadchunk',
                    {
                        b'filePHID': fphid,
                        b'byteStart': start,
                        b'data': base64.b64encode(fctx.data()[start:end]),
                        b'dataEncoding': b'base64',
                    },
                )
719
719
720
720
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A pre-allocated PHID means the server wants a chunked upload.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
756
756
757
757
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # The content differs (or the file is going away): upload the old
        # version and record its metadata.
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        oldpath = encoding.unifromlocal(oldfctx.path())
        mimeguess, _enc = mimetypes.guess_type(oldpath)
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
781
781
782
782
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    newpath = encoding.unifromlocal(fctx.path())
    mimeguess, _enc = mimetypes.guess_type(newpath)
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images can be rendered inline by the web UI.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
795
795
796
796
# Copied from mercurial/patch.py
# Maps a file's flag ('l' symlink, 'x' executable, '' regular) to the octal
# mode string used in git-style diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
799
799
800
800
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
814
814
815
815
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each removed file becomes a DELETE change carrying its old mode, plus
    the text of the removal diff for non-binary files.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the parent filectx up once instead of twice (it was previously
        # fetched separately for the mode and for the binary check).
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
828
828
829
829
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    Binary (or non-UTF-8) files are uploaded with old-version metadata;
    text files get an inline diff.
    """
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        # Reuse the filectx objects computed above instead of re-fetching
        # ctx[fname] / fctx.p1() (they are the same objects).
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
854
854
855
855
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            # The file was copied or moved from ``originalfname`` in p1.
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source is gone in this commit: it's a move.  Record a
                # MOVE_AWAY change for the source and drop it from
                # ``removed`` so addremoved() won't emit it again.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source: the
                # source change is upgraded to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # ``oldfctx`` is only set for copies/moves, so the old-side binary
        # check applies to renamed files only.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the source-side changes last, once their awayPaths are complete.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
925
925
926
926
def creatediff(ctx):
    """Create a "Differential Diff" for ``ctx`` via the conduit API.

    Returns the server's diff description; aborts if the call fails.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    if repophid:
        pdiff.repositoryPHID = repophid
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
952
952
953
953
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    # Both properties are attached the same way; only name and payload differ.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )
990
990
991
991
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    ``olddiff`` must be supplied when ``oldnode`` is, so the old diff can be
    reused if the content is unchanged.  Returns a ``(revision, diff)`` pair:
    the "differential.revision.edit" response and the (new or reused) diff.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    # Only title/summary/testPlan are forwarded; other parsed fields (e.g.
    # reviewers) are handled via explicit actions.
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1061
1061
1062
1062
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [n.lower() for n in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    entries = result[b'data']
    # The API silently drops unknown usernames instead of erroring, so
    # compare what came back against what was asked for and abort on any
    # name that did not resolve.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1078
1078
1079
1079
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line or phabsend.confirm in the config both
    # enable the interactive confirmation prompt.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the reviewer transactions shared by every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" form of the PHID.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "D123 - created - 1:abc: summary".
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite onto already-rewritten parents when an earlier
                    # changeset in the stack was amended in this loop.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1291
1291
1292
1292
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' looks intentional (header
# alignment with "hg export" output) -- confirm before normalizing it.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1304
1304
1305
1305
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets about to be sent and prompt for confirmation

    Returns True when the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # A known Differential Revision id means this will be an update;
        # otherwise a new Revision will be created.
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice() returns the zero-based index of the chosen answer;
    # 0 means "Yes", so any nonzero choice is a refusal.
    return not ui.promptchoice(prompt)
1333
1333
1334
1334
# Differential Revision status names accepted as symbols by the revision
# spec language; compared against _getstatusname() output (lowercase, no
# spaces) when walking the parsed query tree.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1343
1343
1344
1344
1345 def _getstatusname(drev):
1345 def _getstatusname(drev):
1346 """get normalized status name from a Differential Revision"""
1346 """get normalized status name from a Differential Revision"""
1347 return drev[b'statusName'].replace(b' ', b'').lower()
1347 return drev[b'statusName'].replace(b' ', b'').lower()
1348
1348
1349
1349
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This table drives parser.parser() in _parse(); token names must match the
# token types produced by _tokenize().

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1364
1364
1365
1365
def _tokenize(text):
    """tokenize a differential revision spec into (type, value, pos) tuples

    Token types are the keys of _elements; ``value`` is only populated for
    b'symbol' tokens.  A final (b'end', None, pos) token is always emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Consume the longest run of non-special bytes as a single symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1385
1385
1386
1386
def _parse(text):
    """parse a differential revision spec into a tree

    Raises ParseError when the spec has trailing garbage after a valid
    expression.
    """
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1392
1392
1393
1393
1394 def _parsedrev(symbol):
1394 def _parsedrev(symbol):
1395 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1395 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1396 if symbol.startswith(b'D') and symbol[1:].isdigit():
1396 if symbol.startswith(b'D') and symbol[1:].isdigit():
1397 return int(symbol[1:])
1397 return int(symbol[1:])
1398 if symbol.isdigit():
1398 if symbol.isdigit():
1399 return int(symbol)
1399 return int(symbol)
1400
1400
1401
1401
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        # The operand of ":" is needed both as a plain revision and as the
        # starting point for walking a stack of dependencies.
        sub, subanc = _prefetchdrevs(tree[1])
        singles |= sub
        ancestors |= sub | subanc
    else:
        # Any other operator: aggregate over all child subtrees.
        for child in tree[1:]:
            sub, subanc = _prefetchdrevs(child)
            singles |= sub
            ancestors |= subanc
    return singles, ancestors
1422
1422
1423
1423
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # params carries either b'ids' or b'phids'; the first entry is used
        # as the cache key.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            # Cache under both the PHID and the integer id.
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" links, collecting
        # ids top-down, then reversed so the result is bottom-up.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Speculatively fetch a window of ids below each ancestor top so the
        # stack walk in getstack() mostly hits the cache.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbols select every prefetched revision whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Operator names map directly to operator.and_/add/sub on
            # smartsets.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1549
1549
1550
1550
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Skip empty sections (e.g. a blank summary) so no stray blank
    # paragraphs end up in the commit message.
    return b'\n\n'.join(s for s in sections if s)
1564
1564
1565
1565
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # take the first commit in sort order as the representative one
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc only records a unix timestamp; time zone is lost
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # fall back to top-level diff fields for anything still missing
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1633
1633
1634
1634
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    ``specs`` is a sequence of DREVSPEC byte strings; the result is the
    union of each individually evaluated spec. If ``stack`` is true, each
    spec is extended with the ``:`` prefix to pull in its dependencies.
    Raises ``error.Abort`` when the resulting set is empty.

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if len(specs) > 0:

        def _formatspec(s):
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        # parenthesize each spec so '+'-joining them forms a valid union
        spec = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1654
1654
1655
1655
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs (one conduit round-trip)
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # the latest diff attached to the revision is the one to import
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1692
1692
1693
1693
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # emit each generated patch straight to the ui
    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1730
1730
1731
1731
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # chain: the next patch applies on top of this commit
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1791
1791
1792
1792
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # the status flags are mutually exclusive
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # the comment, if any, is attached only to the last revision
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1830
1830
1831
1831
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # prefer the "Differential Revision:" line in the commit description
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # fall back to a D* tag on the changeset, building the URL from config
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
1854
1854
1855
1855
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # no Differential Revision associated with this changeset
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1876
1876
1877
1877
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # fetch all statuses in a single conduit call and index them by rev
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # graph-display hook: print the differential URI and colored status
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # revisions without an associated differential are not displayed
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now