##// END OF EJS Templates
phabricator: account for `basectx != ctx` when calculating renames...
Matt Harbison -
r45101:022bf715 default
parent child Browse files
Show More
@@ -1,1926 +1,1939
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 encoding,
65 encoding,
65 error,
66 error,
66 exthelper,
67 exthelper,
67 graphmod,
68 graphmod,
68 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
69 localrepo,
70 localrepo,
70 logcmdutil,
71 logcmdutil,
71 match,
72 match,
72 mdiff,
73 mdiff,
73 obsutil,
74 obsutil,
74 parser,
75 parser,
75 patch,
76 patch,
76 phases,
77 phases,
77 pycompat,
78 pycompat,
78 scmutil,
79 scmutil,
79 smartset,
80 smartset,
80 tags,
81 tags,
81 templatefilters,
82 templatefilters,
82 templateutil,
83 templateutil,
83 url as urlmod,
84 url as urlmod,
84 util,
85 util,
85 )
86 )
86 from mercurial.utils import (
87 from mercurial.utils import (
87 procutil,
88 procutil,
88 stringutil,
89 stringutil,
89 )
90 )
90 from . import show
91 from . import show
91
92
92
93
93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # leave the attribute unspecified.
97 # leave the attribute unspecified.
97 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
98
99
99 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
100
101
101 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
102 command = eh.command
103 command = eh.command
103 configtable = eh.configtable
104 configtable = eh.configtable
104 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
105 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
106
107
107 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
108 eh.configitem(
109 eh.configitem(
109 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
110 )
111 )
111 eh.configitem(
112 eh.configitem(
112 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
113 )
114 )
114 eh.configitem(
115 eh.configitem(
115 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
116 )
117 )
117 # developer config: phabricator.repophid
118 # developer config: phabricator.repophid
118 eh.configitem(
119 eh.configitem(
119 b'phabricator', b'repophid', default=None,
120 b'phabricator', b'repophid', default=None,
120 )
121 )
121 eh.configitem(
122 eh.configitem(
122 b'phabricator', b'url', default=None,
123 b'phabricator', b'url', default=None,
123 )
124 )
124 eh.configitem(
125 eh.configitem(
125 b'phabsend', b'confirm', default=False,
126 b'phabsend', b'confirm', default=False,
126 )
127 )
127 eh.configitem(
128 eh.configitem(
128 b'phabimport', b'secret', default=False,
129 b'phabimport', b'secret', default=False,
129 )
130 )
130 eh.configitem(
131 eh.configitem(
131 b'phabimport', b'obsolete', default=False,
132 b'phabimport', b'obsolete', default=False,
132 )
133 )
133
134
134 colortable = {
135 colortable = {
135 b'phabricator.action.created': b'green',
136 b'phabricator.action.created': b'green',
136 b'phabricator.action.skipped': b'magenta',
137 b'phabricator.action.skipped': b'magenta',
137 b'phabricator.action.updated': b'magenta',
138 b'phabricator.action.updated': b'magenta',
138 b'phabricator.desc': b'',
139 b'phabricator.desc': b'',
139 b'phabricator.drev': b'bold',
140 b'phabricator.drev': b'bold',
140 b'phabricator.node': b'',
141 b'phabricator.node': b'',
141 b'phabricator.status.abandoned': b'magenta dim',
142 b'phabricator.status.abandoned': b'magenta dim',
142 b'phabricator.status.accepted': b'green bold',
143 b'phabricator.status.accepted': b'green bold',
143 b'phabricator.status.closed': b'green',
144 b'phabricator.status.closed': b'green',
144 b'phabricator.status.needsreview': b'yellow',
145 b'phabricator.status.needsreview': b'yellow',
145 b'phabricator.status.needsrevision': b'red',
146 b'phabricator.status.needsrevision': b'red',
146 b'phabricator.status.changesplanned': b'red',
147 b'phabricator.status.changesplanned': b'red',
147 }
148 }
148
149
149 _VCR_FLAGS = [
150 _VCR_FLAGS = [
150 (
151 (
151 b'',
152 b'',
152 b'test-vcr',
153 b'test-vcr',
153 b'',
154 b'',
154 _(
155 _(
155 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
156 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
156 b', otherwise will mock all http requests using the specified vcr file.'
157 b', otherwise will mock all http requests using the specified vcr file.'
157 b' (ADVANCED)'
158 b' (ADVANCED)'
158 ),
159 ),
159 ),
160 ),
160 ]
161 ]
161
162
162
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.
    """
    loaded = False
    arcparams = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings; fold them back to local
        # byte strings so the values can live in the ui config.
        arcparams = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )

        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig in the working directory; nothing to do.
        pass

    # Map the recognized .arcconfig keys onto [phabricator] config entries.
    overrides = util.sortdict()
    if b"repository.callsign" in arcparams:
        overrides[(b"phabricator", b"callsign")] = arcparams[
            b"repository.callsign"
        ]
    if b"phabricator.uri" in arcparams:
        overrides[(b"phabricator", b"url")] = arcparams[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Still load .hg/hgrc; report success if either source was read.
    return orig(ui, wdirvfs, hgvfs, requirements) or loaded
199
200
200
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Decorator like ``command``, but adding a hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is given, all HTTP traffic performed by the
    command is recorded to (or, if PATH already exists, replayed from) a vcr
    cassette file, so tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when their URL, method and decoded body
        # parameters agree; JSON-valued parameters are compared structurally.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub API tokens so recorded cassettes never hold real credentials.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop session cookies before the response is written to the cassette.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # The vcr flag is consumed here and never reaches ``fn``.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # Import vcr with hg's demand importer disabled; the real
                # import machinery is needed here (vcr pulls in submodules
                # dynamically).
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch hg's own connection factories, not just
                        # httplib, so traffic through urlmod is captured.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command against the real server.
            return fn(*args, **kwargs)

        # depth=2 skips this wrapper when checking the user-visible signature.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
279
280
280
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # Booleans are rendered the way PHP's http_build_query does.
        if isinstance(value, bool):
            value = {True: b'true', False: b'false'}[value]
        # Exact type checks (not isinstance) so dict/list subclasses are
        # treated as opaque leaf values, as the encoder always has.
        tp = type(value)
        if tp is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif tp is dict:
            pairs = value.items()
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
306
307
307
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        groupname, groupauth = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = groupauth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
336
337
337
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``); the
    request is sent either through the user-configured curl command or the
    builtin HTTP library.  Raises ``error.Abort`` on a Conduit-level error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before injecting the auth token so the caller's dict is untouched.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form body into the configured curl command via stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP path; closing() guarantees the response is released.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Recursively convert the unicode strings json produces back into local
    # byte strings.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
381
382
382
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # ``repo`` is accepted (optionalrepo) but unused; only ui I/O is needed.
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable key order and fixed separators keep the output diff-friendly.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
406
407
407
408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # Ask Phabricator which repository owns this callsign.
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None

    phid = matches[0][b'phid']
    # Remember the answer in-memory so later calls skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
427
428
428
429
# Matches a whole local tag name of the form "D123" that associates a node
# with Differential Revision number 123.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>D123" line inside a commit message;
# captures the full URL and the numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
433
434
434
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # force=1 means the commit message itself named the drev (authoritative);
    # force=0 means the association came from a local "D123" tag and must be
    # cross-checked against Phabricator below.
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a D-tag: check commit message instead.
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the hg node recorded in a diff's metadata, if any.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
528
529
529
530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def drevof(ctx):
        # The commit message association wins over any local "D123" tag.
        msgmatch = _differentialrevisiondescre.search(ctx.description())
        if msgmatch:
            return int(msgmatch.group('id'))
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                return int(tagmatch.group(1))
        return None

    return {rev: drevof(repo[rev]) for rev in revs}
551
552
552
553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Diff from the first parent of the bottom of the (possibly folded) range
    # to the top commit, collecting raw chunks and ignoring the ui labels.
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
561
562
562
563
class DiffChangeType(object):
    """Constants for the ``type`` field of a Differential change.

    These mirror Phabricator's DifferentialChangeType values; the move/copy
    pairs describe both ends of a rename or copy (AWAY on the source side,
    HERE on the destination side).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
572
573
573
574
class DiffFileType(object):
    """Constants for the ``fileType`` field of a Differential change,
    mirroring Phabricator's DifferentialChangeType file type values.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
578
579
579
580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Field names are camelCase because they are serialized directly into the
    Conduit API payload.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
593
594
594
595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff.  Each one represents one file in a diff.

    Field names are camelCase because they are serialized directly into the
    Conduit API payload.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Mirror every ``new:*`` metadata entry under the ``old:*`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the unix file mode of the old side of the change."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the unix file mode of the new side of the change."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
634
635
635
636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes.  Corresponds
    to a commit.

    Field names are camelCase because they are serialized directly into the
    Conduit API payload.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the file's current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
662
663
663
664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so each hunk effectively carries the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldoff, oldlen, newoff, newlen), hunklines = fhunk
        # Skip the "@@ ..." line; Phabricator gets the ranges separately.
        corpus = b''.join(hunklines[1:])
        statlines = list(header)
        statlines.extend(hunklines)
        _mf, _mt, added, deleted, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldoff,
                oldlen,
                newoff,
                newlen,
                corpus,
                added,
                deleted,
            )
        )
692
693
693
694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # The server may already hold some chunks (resumed upload).
            if not chunk[b'complete']:
                bstart = int(chunk[b'byteStart'])
                bend = int(chunk[b'byteEnd'])
                callconduit(
                    ui,
                    b'file.uploadchunk',
                    {
                        b'filePHID': fphid,
                        b'byteStart': bstart,
                        b'data': base64.b64encode(fctx.data()[bstart:bend]),
                        b'dataEncoding': b'base64',
                    },
                )
719
720
720
721
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    alloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = alloc[b'filePHID']

    if alloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # Small file: single-shot upload of the whole content.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
756
757
757
758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical; don't re-upload the old blob.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
781
782
782
783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get their own type so the web UI can render a preview.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
795
796
796
797
# Copied from mercurial/patch.py
gitmode = {
    b'l': b'120000',  # symlink
    b'x': b'100755',  # executable
    b'': b'100644',  # regular file
}
799
800
800
801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
814
815
815
816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary / non-UTF-8 removals carry no text hunks.
        if not oldfctx.isbinary() and not notutf8(oldfctx):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
828
829
829
830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        # Only record modes when they changed, matching git-diff behavior.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
854
855
855
856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    ``basectx`` is the bottom of the range being sent and ``ctx`` the top;
    they are the same changeset unless a fold is being uploaded.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    # When sending a folded range, fctx.renamed() only reflects a rename
    # relative to the file's immediate introduction, which may be an
    # intermediate commit inside the range.  Compute copies across the whole
    # basectx.p1()::ctx span instead, so the rename source is resolved in
    # basectx.p1() where it actually exists.
    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappeared: this is a move, and the source side
                # must not also be reported as a plain deletion.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A move source copied to additional destinations.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
925
938
926
939
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        # Mention the full range when uploading a fold, since that is what
        # failed, not the single top commit.
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
956
969
957
970
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    # "hg:meta": the minimum needed to reconstruct the changeset losslessly.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits": richer per-commit info used by the Phabricator UI.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(localcommits),
        },
    )
994
1007
995
1008
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    basectx = ctx
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of the old and new nodes to decide
        # whether the content actually changed since the last upload.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        oldbasectx = oldctx
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    txns = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        txns.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            txns.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        txns.append({b'type': b'parents.set', b'value': [parentrevphid]})

    if actions:
        txns += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in (b'title', b'summary', b'testPlan'):
            txns.append({b'type': k, b'value': v})

    params = {b'transactions': txns}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1069
1082
1070
1083
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [n.lower() for n in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API does not treat unknown usernames as an error, so detect any
    # name the server failed to resolve and abort explicitly.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1086
1099
1087
1100
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm can come from either the command line or configuration
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Resolve reviewer/blocker names up front; blockers are expressed to the
    # API as "blocking(PHID)" strings.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    # NOTE(review): assumes diffmap has an entry for this node
                    # whenever the description changed — verify for skipped revs
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1299
1312
1300
1313
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # trailing space matches the "Parent " header emitted by "hg export"
        (b'parent', b'Parent '),
    ]
)
1312
1325
1313
1326
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets to be sent and prompt the user for confirmation

    Returns True when the user confirms sending, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known revisions show their D<id>; unknown ones are flagged as NEW.
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns the selected index: 0 is "Yes", 1 is "No".
    choice = ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
    return choice == 0
1341
1354
1342
1355
# Normalized Differential Revision status names (see _getstatusname) that the
# drev query language accepts as bare symbols.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1351
1364
1352
1365
1353 def _getstatusname(drev):
1366 def _getstatusname(drev):
1354 """get normalized status name from a Differential Revision"""
1367 """get normalized status name from a Differential Revision"""
1355 return drev[b'statusName'].replace(b' ', b'').lower()
1368 return drev[b'statusName'].replace(b' ', b'').lower()
1356
1369
1357
1370
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1372
1385
1373
1386
def _tokenize(text):
    """lex *text* into (token-type, value, position) tuples

    A symbol is a maximal run of non-special bytes. Special characters are
    emitted one at a time (spaces are dropped), and a final
    ``(b'end', None, pos)`` token terminates the stream.
    """
    special = b'():+-& '
    view = memoryview(text)  # avoids copying on each slice below
    length = len(text)
    pos = 0
    while pos < length:
        run = itertools.takewhile(
            lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
        )
        symbol = b''.join(run)
        if not symbol:
            # A special character is at "pos"; spaces are skipped entirely.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
        else:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
    yield (b'end', None, pos)
1393
1406
1394
1407
def _parse(text):
    """parse a drev-spec string into an AST, aborting on trailing garbage"""
    drevparser = parser.parser(_elements)
    tree, pos = drevparser.parse(_tokenize(text))
    if pos != len(text):
        # The parser stopped before consuming the whole input.
        raise error.ParseError(b'invalid token', pos)
    return tree
1400
1413
1401
1414
1402 def _parsedrev(symbol):
1415 def _parsedrev(symbol):
1403 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1416 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1404 if symbol.startswith(b'D') and symbol[1:].isdigit():
1417 if symbol.startswith(b'D') and symbol[1:].isdigit():
1405 return int(symbol[1:])
1418 return int(symbol[1:])
1406 if symbol.isdigit():
1419 if symbol.isdigit():
1407 return int(symbol)
1420 return int(symbol)
1408
1421
1409
1422
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif op == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles.update(subsingles)
        # The operand's own revisions also become ancestor roots.
        ancestors.update(subsingles)
        ancestors.update(subancestors)
    else:
        # Binary operators and groups: merge results from every operand.
        for subtree in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(subtree)
            singles.update(subsingles)
            ancestors.update(subancestors)
    return singles, ancestors
1430
1443
1431
1444
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # The cache is keyed by both integer id and PHID, so accept either.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" edges; the result is
        # reversed so the bottom of the stack comes first.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For each ancestor root, also
    # speculatively fetch up to "batchsize" lower-numbered revisions, since
    # stacks are usually created with consecutive ids.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbols select all prefetched revisions whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set arithmetic delegates to the smartset operators of the
            # corresponding name (and_/add/sub).
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1557
1570
1558
1571
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped so the message contains no blank paragraphs.
    return b'\n\n'.join(s for s in sections if s)
1572
1585
1573
1586
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata is taken from the "hg:meta" property attached by phabsend,
    e.g.::

        "properties": {
            "hg:meta": {
                "branch": "default",
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    or reconstructed from the "local:commits" property attached by "arc"
    (author/authorEmail, time, branch, commit/rev, parents).  Fields still
    missing afterwards fall back to the top-level diff fields dateCreated,
    branch, and sourceControlBaseRevision.

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting a list of dicts raises TypeError on
            # Python 3 when more than one local commit is present;
            # presumably diffs carry a single local commit in practice --
            # confirm before relying on multi-commit diffs here.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]
    # Fall back to top-level diff fields for anything still missing.
    if b'dateCreated' in diff:
        meta.setdefault(b'date', b'%s 0' % diff[b'dateCreated'])
    if diff.get(b'branch'):
        meta.setdefault(b'branch', diff[b'branch'])
    if diff.get(b'sourceControlBaseRevision'):
        meta.setdefault(b'parent', diff[b'sourceControlBaseRevision'])
    return meta
1641
1654
1642
1655
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:
        # Each spec is parenthesized so operators inside it cannot leak
        # into neighbours; with ``stack`` it is additionally prefixed with
        # ":" to pull in ancestors.  All specs are unioned with "+".
        wrapped = [
            b'(:(%s))' % s if stack else b'(%s)' % s for s in specs
        ]
        drevs = querydrev(ui, b'+'.join(wrapped))
        if drevs:
            return drevs

    # Either no specs were given or the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
1662
1675
1663
1676
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential
    number (as bytes, without the "D" prefix) and the bytes are the text of
    a patch to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch the hg:meta property for all diffs in a single conduit call.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # The newest diff of each revision is the one that gets exported.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property as hg patch
        # headers readable by the "import" command (see patchheadermap and
        # extract in mercurial/patch.py for supported headers).
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headers = [b'# HG changeset patch']
        headers.extend(
            b'# %s %s' % (_metanamemap[k], meta[k])
            for k in _metanamemap.keys()
            if k in meta
        )

        content = b'%s\n%s\n%s' % (b'\n'.join(headers), desc, body)
        patches.append((drev[b'id'], content))

    # Hand all generated patches to the supplied callback in one batch.
    write(patches)
1700
1713
1701
1714
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # Stream each generated patch straight to the ui.
    def _emit(patches):
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
1738
1751
1739
1752
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of
    # leaving rejects.  The remaining values are mandatory defaults, synced
    # with commands.import.
    opts.update(
        {
            b'bypass': True,
            b'strip': 1,
            b'prefix': b'',
            # Evolve 9.3.0 assumes this key is present in
            # cmdutil.tryimportone()
            b'obsolete': False,
        }
    )

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        # Handled by evolve wrapping tryimportone()
        opts[b'obsolete'] = True

    def _apply(patches):
        # Start from the working directory parents, then stack each
        # imported changeset on top of the previously imported one.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, patchtext in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(patchtext)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
1799
1812
1800
1813
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    # NOTE: this command declares no --stack flag, so opts.get(b'stack') is
    # always falsy here; _getdrevs then performs no stack expansion.
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # The comment is appended only when reaching the last revision, so
        # every revision gets the status actions but only the last one gets
        # the comment.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1838
1851
1839
1852
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line in the description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    # Otherwise fall back to a local tag of the form "D123" on this node.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
1862
1875
1863
1876
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    # Pick the response entry matching our Differential id, if any.
    match = next((d for d in drevs if int(d[b'id']) == drevid), None)
    if match is None:
        return None
    return templateutil.hybriddict(
        {b'url': match[b'uri'], b'status': match[b'statusName'],}
    )
1884
1897
1885
1898
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    # Partition revisions into those with a known Differential id and those
    # without; one Differential id can map to several local revisions.
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit round trip fetches the status of every revision at once.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Render "<uri> <colored status>" beneath each changeset line.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revisions with no associated Differential are dropped from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now