##// END OF EJS Templates
phabricator: record all local commits used to create a Differential revision...
Matt Harbison -
r45133:0437959d default
parent child Browse files
Show More
@@ -1,1939 +1,1948 b''
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

A "phabstatus" view for :hg:`show` is also provided; it displays status
information of Phabricator differentials associated with unfinished
changesets.

By default, Phabricator requires ``Test Plan`` which might prevent some
changeset from being sent. The requirement could be disabled by changing
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use builtin HTTP library to
    # communicate. If set, use the specified curl command. This could be useful
    # if you need to specify advanced options that is not easily supported by
    # the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
99
99
# Single exthelper instance collects command/config/template registrations
# for this extension; the aliases below expose them under the conventional
# module-level names that Mercurial's extension loader looks for.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup
107
107
# Declare all config knobs this extension reads, with their defaults.
# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)
134
134
# Color/effect labels used when rendering phabricator output (statuses,
# actions, revision identifiers).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
149
149
# Extra command-line flag appended to every @vcrcommand so tests can record
# or replay HTTP traffic via a vcr cassette file.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162
162
163
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` (JSON) from the working directory, maps the
    ``repository.callsign`` and ``phabricator.uri`` keys onto the
    ``phabricator.callsign`` / ``phabricator.url`` config items, then
    chains to the original ``loadhgrc``.  Returns True if either this
    function or the original loaded any config.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig is a normal condition; fall through silently.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
200
200
201
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Works like ``@command`` but appends the ``--test-vcr`` flag (see
    ``_VCR_FLAGS``).  When the flag is given, HTTP connections are routed
    through a vcr cassette: a nonexistent cassette records traffic, an
    existing one replays it.  API tokens and cookies are scrubbed from
    recordings.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match requests by URI/method and decoded body parameters so that
        # JSON payload key ordering does not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact conduit API tokens before they land in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session secrets; never record them.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does dynamic imports that fight demandimport.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280
280
281
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            # Scalar leaf: record it under the accumulated bracketed key.
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
307
307
308
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises error.Abort when ``phabricator.url`` is unset or no matching
    ``auth.*.phabtoken`` entry is found.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337
337
338
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the conduit method (e.g. ``differential.querydiffs``).
    Sends the request either through the configured ``phabricator.curlcmd``
    or the built-in HTTP opener, and raises error.Abort when the server
    reports an ``error_code``.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form data through the user-supplied curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382
382
383
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407
407
408
408
def getrepophid(repo):
    """given callsign, return repository PHID or None

    The result of the ``diffusion.repository.search`` lookup is cached
    back into the ``phabricator.repophid`` config for the session.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428
428
429
429
430 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
430 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
431 _differentialrevisiondescre = re.compile(
431 _differentialrevisiondescre = re.compile(
432 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
432 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
433 )
433 )
434
434
435
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message itself claims the association.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529
529
530
530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The commit message trailer takes precedence over a local "D123" tag.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result
552
552
553
553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
562
562
563
563
class DiffChangeType(object):
    """Constants describing what happened to a file in a Differential change.

    The numeric values are part of Phabricator's wire protocol and must not
    be altered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573
573
574
574
class DiffFileType(object):
    """Constants describing the content type of a file in a Differential diff.

    The numeric values are part of Phabricator's wire protocol and must not
    be altered.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579
579
580
580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Field names are camelCase because they are serialized directly into the
    Conduit payload (see the ``camelcase-required`` markers).
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594
594
595
595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    camelCase field names are required by the Conduit API serialization.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk, folding its line counts into this change.

        Raises ``error.Abort`` if ``hunk`` is not a phabhunk.
        """
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635
635
636
636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    camelCase field names are required by the Conduit API serialization.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its current path.

        Raises ``error.Abort`` if ``change`` is not a phabchange.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663
663
664
664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``basectx.p1()`` and ``ctx`` and appends the
    resulting hunks (with add/delete line counts) to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A very large context so each hunk carries essentially the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Exactly one file is matched, so only the first diffhunks() entry is used.
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # presumably lines[0] is the "@@ ..." range marker — it is excluded
        # from the corpus sent to Phabricator
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Compute add/delete counts for this synthetic single-hunk patch.
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693
693
694
694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by a prior ``file.allocate`` call.
    """
    ui = fctx.repo().ui
    # Ask the server which byte ranges it still needs.
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip ranges the server already holds (resumable uploads).
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720
720
721
721
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID on success; raises ``error.Abort`` if no PHID was
    obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID allocated: small enough for a single-shot upload.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # A PHID with upload=True means the server wants chunked upload.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757
757
758
758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        # Contents are identical, so the new-side metadata describes the old
        # side as well.
        pchange.copynewmetadatatoold()
782
782
783
783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guess:
        guess = pycompat.bytestr(guess)
        pchange.metadata[b'new:file:mime-type'] = guess
        # Images get their own file type so the web UI can render a preview.
        if guess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
796
796
797
797
# Copied from mercurial/patch.py
# Maps a file context's flags string to the git-style mode used in diffs:
# 'l' = symlink, 'x' = executable, '' = regular file.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800
800
801
801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Returns True (and warns the user) when the content does not decode as
    UTF-8, False otherwise.
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815
815
816
816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary or non-UTF-8 content carries no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829
829
830
830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Keep the original short-circuit order: notutf8() warns the user as
        # a side effect, so evaluation sequence matters.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855
855
856
856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — a rename's source file is taken out
    of the list so addremoved() won't emit a separate DELETE for it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the copy/rename source, preferring the precomputed
        # pathcopies map when diffing a multi-commit range.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source vanished: this is a move. Record the away-side change
                # and drop the source from `removed` so it isn't double-counted.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source upgrades
                # the away-side change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() warns as a side effect; evaluation order matters here.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Away-side changes are emitted after all destinations are known, since a
    # later copy can still upgrade them (e.g. to MULTICOPY).
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938
938
939
939
def creatediff(basectx, ctx):
    """create a Differential Diff

    Diffs the range ``basectx::ctx`` (a single commit when they are equal)
    and posts it via the ``differential.creatediff`` Conduit call, returning
    the server's response dict. Raises ``error.Abort`` on an empty response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
969
969
970
970
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    # "hg:meta" describes the diff as a whole: user/date/branch/node come
    # from the tip commit, while the parent is the base commit's first
    # parent so the range re-applies on the correct ancestor.
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    # "local:commits" records every local commit that went into this diff,
    # keyed by hex node.
    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1007
1014
1008
1015
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of Conduit response dicts; raises
    ``error.Abort`` when the revision.edit call returns nothing.
    """
    basectx = ctx
    repo = ctx.repo()
    if oldnode:
        # Compare the full-context diffs of the new and old nodes; a byte
        # match means no new Differential diff needs to be uploaded.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        oldbasectx = oldctx
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties([ctx], diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1082
1089
1083
1090
def userphids(ui, names):
    """Resolve user names to their Phabricator PHIDs.

    Aborts when any name cannot be resolved: the conduit ``user.search``
    endpoint silently ignores unknown usernames instead of erroring, so the
    missing ones are detected here by comparing the request with the reply.
    """
    wanted = [n.lower() for n in names]
    response = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = response[b'data']
    # Detect usernames the API quietly dropped from the result set.
    found = {e[b'fields'][b'username'].lower() for e in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [e[b'phid'] for e in entries]
1099
1106
1100
1107
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker transactions are shared by every revision sent below.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers are expressed as "blocking(PHID)" markers.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # Emit one status line per revision, e.g. "D12 - created - 5:abc: msg".
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Map parents through commits already rewritten this run so
                    # the amended stack stays connected.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1321
1313
1322
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' appears intentional to match
# the "hg export" header spelling — confirm before "fixing" it.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1334
1326
1335
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and prompt for confirmation.

    Returns True when the user answers Yes, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Show the existing Differential Revision number, or NEW when the
        # changeset has no association yet.
        if drevid:
            drevlabel = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevlabel = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevlabel,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    # promptchoice() returns the index of the chosen answer; 0 means Yes.
    choice = ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
    return choice == 0
1363
1355
1364
# Status names accepted as symbols in the drev query language; compared
# against the normalized output of _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1373
1365
1374
1366 def _getstatusname(drev):
1375 def _getstatusname(drev):
1367 """get normalized status name from a Differential Revision"""
1376 """get normalized status name from a Differential Revision"""
1368 return drev[b'statusName'].replace(b' ', b'').lower()
1377 return drev[b'statusName'].replace(b' ', b'').lower()
1369
1378
1370
1379
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This table drives parser.parser() in _parse() below; each entry describes
# how a token binds and which AST node it produces.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1385
1394
1386
1395
def _tokenize(text):
    """Yield (token-type, value, position) triples for a drev spec.

    Symbols are maximal runs of non-special bytes; special characters are
    emitted individually (spaces are dropped), and a final (b'end', None, pos)
    token terminates the stream.
    """
    special = b'():+-& '
    view = memoryview(text)  # slice without copying
    end = len(text)
    pos = 0
    while pos < end:
        # Gather the longest run of non-special bytes starting at pos.
        run = itertools.takewhile(
            lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
        )
        symbol = b''.join(run)
        if not symbol:
            # A special character; spaces are skipped entirely.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
        else:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
    yield (b'end', None, pos)
1406
1415
1407
1416
def _parse(text):
    """Parse a drev spec into an AST tree; raise ParseError on leftovers."""
    tree, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        # The parser stopped before consuming the whole input.
        raise error.ParseError(b'invalid token', consumed)
    return tree
1413
1422
1414
1423
1415 def _parsedrev(symbol):
1424 def _parsedrev(symbol):
1416 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1425 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1417 if symbol.startswith(b'D') and symbol[1:].isdigit():
1426 if symbol.startswith(b'D') and symbol[1:].isdigit():
1418 return int(symbol[1:])
1427 return int(symbol[1:])
1419 if symbol.isdigit():
1428 if symbol.isdigit():
1420 return int(symbol)
1429 return int(symbol)
1421
1430
1422
1431
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif op == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles.update(subsingles)
        # Operands of ":" need their whole stacks fetched as well.
        ancestors.update(subsingles)
        ancestors.update(subancestors)
    else:
        # Binary operators and groups: merge results from every operand.
        for subtree in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(subtree)
            singles.update(subsingles)
            ancestors.update(subancestors)
    return singles, ancestors
1443
1452
1444
1453
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by id and by phid so
        # later lookups of either form hit the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Walk "depends-on" edges downwards to collect the whole stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status symbol selects every prefetched drev whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1570
1579
1571
1580
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Drop empty sections so we don't emit consecutive blank separators.
    return b'\n\n'.join(p for p in parts if p)
1585
1594
1586
1595
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata comes from the "hg:meta" property (sent by phabsend),
    which carries "branch", "date", "node", "user" and "parent" keys
    directly, or is reconstructed from the "local:commits" property sent
    by "arc", which stores per-commit author/authorEmail/branch/commit/
    parents/time entries keyed by node.

    Missing "date", "branch" and "parent" values fall back to the
    diff-level "dateCreated", "branch" and "sourceControlBaseRevision"
    fields.

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    # When "hg:meta" exists, that dict itself is (intentionally) mutated
    # and returned, so the fallbacks below land in the property dict too.
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorted() compares the commit dicts themselves,
            # which Python 3 cannot order when more than one local commit
            # is recorded — confirm whether multi-commit diffs reach here.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if commit.get(b'parents'):
                meta[b'parent'] = commit[b'parents'][0]
    # Diff-level fallbacks for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1654
1663
1655
1664
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    Each spec is parenthesized (and prefixed with ``:`` when *stack* is
    set, which selects the whole dependency stack), the pieces are joined
    with ``+`` (union), and the combined spec is evaluated by querydrev().

    See ``hg help phabread`` for how to specify each DREVSPEC.

    Aborts when no specs were given or when the query matched nothing.
    """
    if specs:
        pieces = []
        for spec in specs:
            if stack:
                spec = b':(%s)' % spec
            pieces.append(b'(%s)' % spec)
        drevs = querydrev(ui, b'+'.join(pieces))
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1675
1684
1676
1685
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes) pairs: DREV is the differential
    number (as bytes, without the "D" prefix) and the bytes are the text
    of a patch to be imported. drevs is what "querydrev" returns, i.e.
    results of "differential.query".
    """
    # One conduit round-trip prefetches the hg:meta property of every
    # diff (only the newest diff of each revision is used).
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Emit "# HG changeset patch" headers that "hg import" can read
        # (see patchheadermap and extract in mercurial/patch.py for the
        # supported header names).
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[key], meta[key]))
        header = b''.join(line + b'\n' for line in headerlines)

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand the assembled patches to the supplied callback.
    write(patches)
1713
1722
1714
1723
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # readpatch() hands over a list of (drev id, patch text) pairs; print
    # each patch verbatim so the output can be piped into "hg import".
    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1751
1760
1752
1761
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    # Optional behavior controlled by [phabimport] config knobs.
    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    # Callback for readpatch(): apply each patch in order inside a single
    # transaction, threading each newly committed node in as the parent of
    # the next patch so a stack imports linearly without touching the wdir.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1812
1821
1813
1822
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive: at most one may be given.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Translate each selected flag into a Conduit transaction; the same
    # action list is applied to every selected revision.
    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    # NOTE(review): no --stack option is declared in the option table above,
    # so opts.get(b'stack') is always falsy here — presumably kept for
    # symmetry with phabread/phabimport; confirm whether --stack was meant
    # to be supported.
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # The comment (if any) is appended only when processing the last
        # revision of the set.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1851
1860
1852
1861
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line embedded in the
    # commit description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # Otherwise look for a local tag matching the Differential tag
        # pattern and synthesize the URL from the phabricator.url config.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    # No review information associated with this changeset.
    return None
1875
1884
1876
1885
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        # Map the local revision to its Differential id; a KeyError means
        # getdrevmap() found no entry for this revision.
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # Query Phabricator and pick the result matching our id.
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1897
1906
1898
1907
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition revisions into those with a known Differential id and the
    # rest; several local revisions may map to the same Differential.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit call for all ids, then index the results by revision.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Per-changeset hook invoked by the displayer: print the Differential
    # URI and its status name, colored via the phabricator.status.* labels.
    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph revisions that have an associated Differential.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now