##// END OF EJS Templates
phabricator: fix loadhgrc() override broken by D8656...
Martin von Zweigbergk -
r46074:c7fe0dfb default
parent child Browse files
Show More
@@ -1,2312 +1,2312 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 rewriteutil,
79 rewriteutil,
80 scmutil,
80 scmutil,
81 smartset,
81 smartset,
82 tags,
82 tags,
83 templatefilters,
83 templatefilters,
84 templateutil,
84 templateutil,
85 url as urlmod,
85 url as urlmod,
86 util,
86 util,
87 )
87 )
88 from mercurial.utils import (
88 from mercurial.utils import (
89 procutil,
89 procutil,
90 stringutil,
90 stringutil,
91 )
91 )
92 from . import show
92 from . import show
93
93
94
94
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# Configuration items for this extension.  Entries marked "developer
# config" are internal knobs, not intended for end users.
# developer config: phabricator.batchsize
eh.configitem(b'phabricator', b'batchsize', default=12)
eh.configitem(b'phabricator', b'callsign', default=None)
eh.configitem(b'phabricator', b'curlcmd', default=None)
# developer config: phabricator.debug
eh.configitem(b'phabricator', b'debug', default=False)
# developer config: phabricator.repophid
eh.configitem(b'phabricator', b'repophid', default=None)
eh.configitem(b'phabricator', b'url', default=None)
eh.configitem(b'phabsend', b'confirm', default=False)
eh.configitem(b'phabimport', b'secret', default=False)
eh.configitem(b'phabimport', b'obsolete', default=False)

# Color/effect labels applied to phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Hidden flag appended to every command wrapped by vcrcommand(); the test
# suite uses it to record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
167
167
168
168
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads the JSON ``.arcconfig`` file from the working directory, if
    present, and maps its ``repository.callsign`` and ``phabricator.uri``
    keys onto the ``phabricator.callsign`` and ``phabricator.url`` config
    items respectively.

    Any extra positional or keyword arguments are forwarded untouched to
    ``orig`` so this override keeps working when loadhgrc() grows new
    parameters (the lack of ``*args`` here is what D8656 broke).

    Returns True if any configuration was loaded, by this override or by
    ``orig``.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig in the working directory; nothing to load.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return (
        orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or result
    )  # Load .hg/hgrc
207
207
208
208
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``command()``, but appends the hidden ``--test-vcr`` flag
    (see ``_VCR_FLAGS``).  When that flag names a cassette file, all HTTP
    requests made by the command are recorded to, or mocked from, it.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Requests must target the same URI with the same method before we
        # bother comparing bodies.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the Conduit API token before it lands in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies must not be persisted in recorded transcripts.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if not vcr:
                # No cassette requested: run the command normally.
                return fn(*args, **kwargs)
            cassette = pycompat.fsdecode(vcr)
            import hgdemandimport

            with hgdemandimport.deactivated():
                import vcr as vcrmod
                import vcr.stubs as stubs

                recorder = vcrmod.VCR(
                    serializer='json',
                    before_record_request=sanitiserequest,
                    before_record_response=sanitiseresponse,
                    custom_patches=[
                        (
                            urlmod,
                            'httpconnection',
                            stubs.VCRHTTPConnection,
                        ),
                        (
                            urlmod,
                            'httpsconnection',
                            stubs.VCRHTTPSConnection,
                        ),
                    ],
                )
                recorder.register_matcher('hgmatcher', hgmatcher)
                with recorder.use_cassette(cassette, match_on=['hgmatcher']):
                    return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
288
288
289
289
290 def _debug(ui, *msg, **opts):
290 def _debug(ui, *msg, **opts):
291 """write debug output for Phabricator if ``phabricator.debug`` is set
291 """write debug output for Phabricator if ``phabricator.debug`` is set
292
292
293 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
293 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
294 printed with the --debug argument.
294 printed with the --debug argument.
295 """
295 """
296 if ui.configbool(b"phabricator", b"debug"):
296 if ui.configbool(b"phabricator", b"debug"):
297 flag = ui.debugflag
297 flag = ui.debugflag
298 try:
298 try:
299 ui.debugflag = True
299 ui.debugflag = True
300 ui.write(*msg, **opts)
300 ui.write(*msg, **opts)
301 finally:
301 finally:
302 ui.debugflag = flag
302 ui.debugflag = flag
303
303
304
304
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            # Python -> PHP form
            obj = b'true' if obj else b'false'
        # Exact type checks on purpose: subclasses (e.g. sortdict) are
        # treated as leaf values, matching the historical behaviour.
        if type(obj) is list:
            children = [(b'%d' % i, v) for i, v in enumerate(obj)]
        elif type(obj) is dict:
            children = list(obj.items())
        else:
            children = None
        if children is None:
            flatparams[prefix] = obj
            return
        for k, v in children:
            if prefix:
                process(b'%s[%s]' % (prefix, k), v)
            else:
                process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
330
330
331
331
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
360
360
361
361
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Attach the API token without mutating the caller's dict.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the configured curl command, feeding the payload on
        # stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Use the builtin HTTP client.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
405
405
406
406
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
430
430
431
431
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # Cache the looked-up PHID so later calls skip the Conduit round trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
451
451
452
452
# Matches a local tag of the exact form "D123"; group 1 captures the
# numeric Differential Revision id.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>" trailer line in a commit
# message (multiline mode); the named groups capture the full URL and the
# numeric revision id at its end.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
457
457
458
458
459 def getoldnodedrevmap(repo, nodelist):
459 def getoldnodedrevmap(repo, nodelist):
460 """find previous nodes that has been sent to Phabricator
460 """find previous nodes that has been sent to Phabricator
461
461
462 return {node: (oldnode, Differential diff, Differential Revision ID)}
462 return {node: (oldnode, Differential diff, Differential Revision ID)}
463 for node in nodelist with known previous sent versions, or associated
463 for node in nodelist with known previous sent versions, or associated
464 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
464 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
465 be ``None``.
465 be ``None``.
466
466
467 Examines commit messages like "Differential Revision:" to get the
467 Examines commit messages like "Differential Revision:" to get the
468 association information.
468 association information.
469
469
470 If such commit message line is not found, examines all precursors and their
470 If such commit message line is not found, examines all precursors and their
471 tags. Tags with format like "D1234" are considered a match and the node
471 tags. Tags with format like "D1234" are considered a match and the node
472 with that tag, and the number after "D" (ex. 1234) will be returned.
472 with that tag, and the number after "D" (ex. 1234) will be returned.
473
473
474 The ``old node``, if not None, is guaranteed to be the last diff of
474 The ``old node``, if not None, is guaranteed to be the last diff of
475 corresponding Differential Revision, and exist in the repo.
475 corresponding Differential Revision, and exist in the repo.
476 """
476 """
477 unfi = repo.unfiltered()
477 unfi = repo.unfiltered()
478 has_node = unfi.changelog.index.has_node
478 has_node = unfi.changelog.index.has_node
479
479
480 result = {} # {node: (oldnode?, lastdiff?, drev)}
480 result = {} # {node: (oldnode?, lastdiff?, drev)}
481 # ordered for test stability when printing new -> old mapping below
481 # ordered for test stability when printing new -> old mapping below
482 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
482 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
483 for node in nodelist:
483 for node in nodelist:
484 ctx = unfi[node]
484 ctx = unfi[node]
485 # For tags like "D123", put them into "toconfirm" to verify later
485 # For tags like "D123", put them into "toconfirm" to verify later
486 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
486 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
487 for n in precnodes:
487 for n in precnodes:
488 if has_node(n):
488 if has_node(n):
489 for tag in unfi.nodetags(n):
489 for tag in unfi.nodetags(n):
490 m = _differentialrevisiontagre.match(tag)
490 m = _differentialrevisiontagre.match(tag)
491 if m:
491 if m:
492 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
492 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
493 break
493 break
494 else:
494 else:
495 continue # move to next predecessor
495 continue # move to next predecessor
496 break # found a tag, stop
496 break # found a tag, stop
497 else:
497 else:
498 # Check commit message
498 # Check commit message
499 m = _differentialrevisiondescre.search(ctx.description())
499 m = _differentialrevisiondescre.search(ctx.description())
500 if m:
500 if m:
501 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
501 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
502
502
503 # Double check if tags are genuine by collecting all old nodes from
503 # Double check if tags are genuine by collecting all old nodes from
504 # Phabricator, and expect precursors overlap with it.
504 # Phabricator, and expect precursors overlap with it.
505 if toconfirm:
505 if toconfirm:
506 drevs = [drev for force, precs, drev in toconfirm.values()]
506 drevs = [drev for force, precs, drev in toconfirm.values()]
507 alldiffs = callconduit(
507 alldiffs = callconduit(
508 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
508 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
509 )
509 )
510
510
511 def getnodes(d, precset):
511 def getnodes(d, precset):
512 # Ignore other nodes that were combined into the Differential
512 # Ignore other nodes that were combined into the Differential
513 # that aren't predecessors of the current local node.
513 # that aren't predecessors of the current local node.
514 return [n for n in getlocalcommits(d) if n in precset]
514 return [n for n in getlocalcommits(d) if n in precset]
515
515
516 for newnode, (force, precset, drev) in toconfirm.items():
516 for newnode, (force, precset, drev) in toconfirm.items():
517 diffs = [
517 diffs = [
518 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
518 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
519 ]
519 ]
520
520
521 # local predecessors known by Phabricator
521 # local predecessors known by Phabricator
522 phprecset = {n for d in diffs for n in getnodes(d, precset)}
522 phprecset = {n for d in diffs for n in getnodes(d, precset)}
523
523
524 # Ignore if precursors (Phabricator and local repo) do not overlap,
524 # Ignore if precursors (Phabricator and local repo) do not overlap,
525 # and force is not set (when commit message says nothing)
525 # and force is not set (when commit message says nothing)
526 if not force and not phprecset:
526 if not force and not phprecset:
527 tagname = b'D%d' % drev
527 tagname = b'D%d' % drev
528 tags.tag(
528 tags.tag(
529 repo,
529 repo,
530 tagname,
530 tagname,
531 nullid,
531 nullid,
532 message=None,
532 message=None,
533 user=None,
533 user=None,
534 date=None,
534 date=None,
535 local=True,
535 local=True,
536 )
536 )
537 unfi.ui.warn(
537 unfi.ui.warn(
538 _(
538 _(
539 b'D%d: local tag removed - does not match '
539 b'D%d: local tag removed - does not match '
540 b'Differential history\n'
540 b'Differential history\n'
541 )
541 )
542 % drev
542 % drev
543 )
543 )
544 continue
544 continue
545
545
546 # Find the last node using Phabricator metadata, and make sure it
546 # Find the last node using Phabricator metadata, and make sure it
547 # exists in the repo
547 # exists in the repo
548 oldnode = lastdiff = None
548 oldnode = lastdiff = None
549 if diffs:
549 if diffs:
550 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
550 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
551 oldnodes = getnodes(lastdiff, precset)
551 oldnodes = getnodes(lastdiff, precset)
552
552
553 _debug(
553 _debug(
554 unfi.ui,
554 unfi.ui,
555 b"%s mapped to old nodes %s\n"
555 b"%s mapped to old nodes %s\n"
556 % (
556 % (
557 short(newnode),
557 short(newnode),
558 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
558 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
559 ),
559 ),
560 )
560 )
561
561
562 # If this commit was the result of `hg fold` after submission,
562 # If this commit was the result of `hg fold` after submission,
563 # and now resubmitted with --fold, the easiest thing to do is
563 # and now resubmitted with --fold, the easiest thing to do is
564 # to leave the node clear. This only results in creating a new
564 # to leave the node clear. This only results in creating a new
565 # diff for the _same_ Differential Revision if this commit is
565 # diff for the _same_ Differential Revision if this commit is
566 # the first or last in the selected range. If we picked a node
566 # the first or last in the selected range. If we picked a node
567 # from the list instead, it would have to be the lowest if at
567 # from the list instead, it would have to be the lowest if at
568 # the beginning of the --fold range, or the highest at the end.
568 # the beginning of the --fold range, or the highest at the end.
569 # Otherwise, one or more of the nodes wouldn't be considered in
569 # Otherwise, one or more of the nodes wouldn't be considered in
570 # the diff, and the Differential wouldn't be properly updated.
570 # the diff, and the Differential wouldn't be properly updated.
571 # If this commit is the result of `hg split` in the same
571 # If this commit is the result of `hg split` in the same
572 # scenario, there is a single oldnode here (and multiple
572 # scenario, there is a single oldnode here (and multiple
573 # newnodes mapped to it). That makes it the same as the normal
573 # newnodes mapped to it). That makes it the same as the normal
574 # case, as the edges of the newnode range cleanly maps to one
574 # case, as the edges of the newnode range cleanly maps to one
575 # oldnode each.
575 # oldnode each.
576 if len(oldnodes) == 1:
576 if len(oldnodes) == 1:
577 oldnode = oldnodes[0]
577 oldnode = oldnodes[0]
578 if oldnode and not has_node(oldnode):
578 if oldnode and not has_node(oldnode):
579 oldnode = None
579 oldnode = None
580
580
581 result[newnode] = (oldnode, lastdiff, drev)
581 result[newnode] = (oldnode, lastdiff, drev)
582
582
583 return result
583 return result
584
584
585
585
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The commit message is consulted first; if it does not embed a
    Differential Revision reference, local ``D<number>`` tags on the node
    are tried as a fallback.
    """
    mapping = {}
    for rev in revs:
        ctx = repo[rev]
        drev = None
        # Prefer the Differential reference embedded in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drev = int(descmatch.group('id'))
        else:
            # Fall back to local tags of the form D<number>.
            for tag in repo.nodetags(ctx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drev = int(tagmatch.group(1))
                    break
        mapping[rev] = drev

    return mapping
607
607
608
608
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # diffui yields (chunk, label) pairs; only the raw bytes are wanted.
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    return b''.join(chunk for chunk, _label in chunks)
617
617
618
618
class DiffChangeType(object):
    """Integer constants describing how a file changed in a Differential
    diff: plain add/change/delete plus the copy/move variants used when a
    file has a traced origin.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
628
628
629
629
class DiffFileType(object):
    """Integer constants for the kind of content a changed file holds,
    serialized into the change's ``fileType`` field.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
634
634
635
635
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are camelCase because they are serialized as-is into
    Conduit API parameters (marked "camelcase-required" below).  The field
    order matters: callers construct phabhunk positionally.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # text of the hunk body (without the "@@ ... @@" header line)
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
649
649
650
650
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names are camelCase because they are serialized as-is into
    Conduit API parameters (marked "camelcase-required" below).
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Mirror every ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. b'100644')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
690
690
691
691
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Attribute names are camelCase because they are serialized as-is into
    Conduit API parameters (marked "camelcase-required" below).
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # maps a change's currentPath to its serialized phabchange dict
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its currentPath."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
718
718
719
719
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    # A single-file matcher with a huge context window, so the diff machinery
    # produces the hunks for exactly this file.
    matcher = match.exact([fname])
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, hunklines in hunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # Drop the leading "@@ ... @@" line; Phabricator wants only the body.
        corpus = b''.join(hunklines[1:])
        # Run diffstat over header + hunk to get add/del line counts.
        statlines = list(header) + list(hunklines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
748
748
749
749
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Chunks the server already has are skipped.
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
775
775
776
776
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it, matched by content hash) and to determine
    # if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # a PHID with pending upload means the server wants chunks
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
812
812
813
813
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged; mirror the new-side metadata instead of
        # re-uploading. If it's left as IMAGE/BINARY the web UI might try
        # to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
837
837
838
838
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimetype = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimetype
        # Images get a dedicated file type.
        if mimetype.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
851
851
852
852
# Copied from mercurial/patch.py
# Maps a file context's flags ('l' symlink, 'x' executable, '' regular)
# to the git-style mode string used in change properties.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
855
855
856
856
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn the user so the silent type change isn't a surprise.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
870
870
871
871
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or undecodable) content gets no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
884
884
885
885
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # NOTE: notutf8() may print a warning, so keep this evaluation order.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
910
910
911
911
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — the source file of a detected move
    is dropped from it so that addremoved() won't emit it a second time.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where this file came from: copy tracing over the
        # basectx..ctx range when folding, otherwise the rename recorded
        # on the file context itself.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source also disappeared: this is a move. Record the
                # MOVE_AWAY side and take the source out of `removed`.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source:
                # upgrade the away side to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # NOTE: notutf8() may print a warning, so evaluation order matters.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the AWAY sides last, after every destination has been seen.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
993
993
994
994
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    # No diff came back: report the commit (or range, when folding).
    if basectx == ctx:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    raise error.Abort(_(b'cannot create diff for %s::%s') % (basectx, ctx))
1024
1024
1025
1025
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # hg:meta describes the folded range as a whole: tip commit's identity
    # plus the parent the whole range is based on.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # local:commits lists every commit in the range individually.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1069
1069
1070
1070
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair, where ``revision`` is the conduit
    response for ``differential.revision.edit`` and ``diff`` is the diff that
    is now attached to the Revision (newly created or the reused ``olddiff``).
    Raises ``error.Abort`` if the conduit call yields no revision.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # A huge context value makes the diff effectively whole-file, so the
        # two patch texts compare equal iff the content truly did not change.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1189
1189
1190
1190
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1206
1206
1207
1207
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    # Map the internal action token to its translated display text.
    actionnames = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    labeled_action = ui.label(
        actionnames[action], b'phabricator.action.%s' % action
    )
    labeled_drev = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    labeled_node = ui.label(bytes(ctx), b'phabricator.node')
    firstline = ctx.description().split(b'\n')[0]
    labeled_desc = ui.label(firstline, b'phabricator.desc')
    ui.write(
        _(b'%s - %s - %s: %s\n')
        % (labeled_drev, labeled_action, labeled_node, labeled_desc)
    )
1226
1226
1227
1227
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = [short(node) for node in newnodes]
    _debug(unfi.ui, b"new commits: %s\n" % stringutil.pprint(shortnodes))

    newctxs = [unfi[node] for node in newnodes]
    try:
        writediffproperties(newctxs, diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1245
1245
1246
1246
1247 @vcrcommand(
1247 @vcrcommand(
1248 b'phabsend',
1248 b'phabsend',
1249 [
1249 [
1250 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1250 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1251 (b'', b'amend', True, _(b'update commit messages')),
1251 (b'', b'amend', True, _(b'update commit messages')),
1252 (b'', b'reviewer', [], _(b'specify reviewers')),
1252 (b'', b'reviewer', [], _(b'specify reviewers')),
1253 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1253 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1254 (
1254 (
1255 b'm',
1255 b'm',
1256 b'comment',
1256 b'comment',
1257 b'',
1257 b'',
1258 _(b'add a comment to Revisions with new/updated Diffs'),
1258 _(b'add a comment to Revisions with new/updated Diffs'),
1259 ),
1259 ),
1260 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1260 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1261 (b'', b'fold', False, _(b'combine the revisions into one review')),
1261 (b'', b'fold', False, _(b'combine the revisions into one review')),
1262 ],
1262 ],
1263 _(b'REV [OPTIONS]'),
1263 _(b'REV [OPTIONS]'),
1264 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1264 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1265 )
1265 )
1266 def phabsend(ui, repo, *revs, **opts):
1266 def phabsend(ui, repo, *revs, **opts):
1267 """upload changesets to Phabricator
1267 """upload changesets to Phabricator
1268
1268
1269 If there are multiple revisions specified, they will be send as a stack
1269 If there are multiple revisions specified, they will be send as a stack
1270 with a linear dependencies relationship using the order specified by the
1270 with a linear dependencies relationship using the order specified by the
1271 revset.
1271 revset.
1272
1272
1273 For the first time uploading changesets, local tags will be created to
1273 For the first time uploading changesets, local tags will be created to
1274 maintain the association. After the first time, phabsend will check
1274 maintain the association. After the first time, phabsend will check
1275 obsstore and tags information so it can figure out whether to update an
1275 obsstore and tags information so it can figure out whether to update an
1276 existing Differential Revision, or create a new one.
1276 existing Differential Revision, or create a new one.
1277
1277
1278 If --amend is set, update commit messages so they have the
1278 If --amend is set, update commit messages so they have the
1279 ``Differential Revision`` URL, remove related tags. This is similar to what
1279 ``Differential Revision`` URL, remove related tags. This is similar to what
1280 arcanist will do, and is more desired in author-push workflows. Otherwise,
1280 arcanist will do, and is more desired in author-push workflows. Otherwise,
1281 use local tags to record the ``Differential Revision`` association.
1281 use local tags to record the ``Differential Revision`` association.
1282
1282
1283 The --confirm option lets you confirm changesets before sending them. You
1283 The --confirm option lets you confirm changesets before sending them. You
1284 can also add following to your configuration file to make it default
1284 can also add following to your configuration file to make it default
1285 behaviour::
1285 behaviour::
1286
1286
1287 [phabsend]
1287 [phabsend]
1288 confirm = true
1288 confirm = true
1289
1289
1290 By default, a separate review will be created for each commit that is
1290 By default, a separate review will be created for each commit that is
1291 selected, and will have the same parent/child relationship in Phabricator.
1291 selected, and will have the same parent/child relationship in Phabricator.
1292 If ``--fold`` is set, multiple commits are rolled up into a single review
1292 If ``--fold`` is set, multiple commits are rolled up into a single review
1293 as if diffed from the parent of the first revision to the last. The commit
1293 as if diffed from the parent of the first revision to the last. The commit
1294 messages are concatenated in the summary field on Phabricator.
1294 messages are concatenated in the summary field on Phabricator.
1295
1295
1296 phabsend will check obsstore and the above association to decide whether to
1296 phabsend will check obsstore and the above association to decide whether to
1297 update an existing Differential Revision, or create a new one.
1297 update an existing Differential Revision, or create a new one.
1298 """
1298 """
1299 opts = pycompat.byteskwargs(opts)
1299 opts = pycompat.byteskwargs(opts)
1300 revs = list(revs) + opts.get(b'rev', [])
1300 revs = list(revs) + opts.get(b'rev', [])
1301 revs = scmutil.revrange(repo, revs)
1301 revs = scmutil.revrange(repo, revs)
1302 revs.sort() # ascending order to preserve topological parent/child in phab
1302 revs.sort() # ascending order to preserve topological parent/child in phab
1303
1303
1304 if not revs:
1304 if not revs:
1305 raise error.Abort(_(b'phabsend requires at least one changeset'))
1305 raise error.Abort(_(b'phabsend requires at least one changeset'))
1306 if opts.get(b'amend'):
1306 if opts.get(b'amend'):
1307 cmdutil.checkunfinished(repo)
1307 cmdutil.checkunfinished(repo)
1308
1308
1309 ctxs = [repo[rev] for rev in revs]
1309 ctxs = [repo[rev] for rev in revs]
1310
1310
1311 if any(c for c in ctxs if c.obsolete()):
1311 if any(c for c in ctxs if c.obsolete()):
1312 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1312 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1313
1313
1314 # Ensure the local commits are an unbroken range. The semantics of the
1314 # Ensure the local commits are an unbroken range. The semantics of the
1315 # --fold option implies this, and the auto restacking of orphans requires
1315 # --fold option implies this, and the auto restacking of orphans requires
1316 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1316 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1317 # get A' as a parent.
1317 # get A' as a parent.
1318 def _fail_nonlinear_revs(revs, revtype):
1318 def _fail_nonlinear_revs(revs, revtype):
1319 badnodes = [repo[r].node() for r in revs]
1319 badnodes = [repo[r].node() for r in revs]
1320 raise error.Abort(
1320 raise error.Abort(
1321 _(b"cannot phabsend multiple %s revisions: %s")
1321 _(b"cannot phabsend multiple %s revisions: %s")
1322 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1322 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1323 hint=_(b"the revisions must form a linear chain"),
1323 hint=_(b"the revisions must form a linear chain"),
1324 )
1324 )
1325
1325
1326 heads = repo.revs(b'heads(%ld)', revs)
1326 heads = repo.revs(b'heads(%ld)', revs)
1327 if len(heads) > 1:
1327 if len(heads) > 1:
1328 _fail_nonlinear_revs(heads, b"head")
1328 _fail_nonlinear_revs(heads, b"head")
1329
1329
1330 roots = repo.revs(b'roots(%ld)', revs)
1330 roots = repo.revs(b'roots(%ld)', revs)
1331 if len(roots) > 1:
1331 if len(roots) > 1:
1332 _fail_nonlinear_revs(roots, b"root")
1332 _fail_nonlinear_revs(roots, b"root")
1333
1333
1334 fold = opts.get(b'fold')
1334 fold = opts.get(b'fold')
1335 if fold:
1335 if fold:
1336 if len(revs) == 1:
1336 if len(revs) == 1:
1337 # TODO: just switch to --no-fold instead?
1337 # TODO: just switch to --no-fold instead?
1338 raise error.Abort(_(b"cannot fold a single revision"))
1338 raise error.Abort(_(b"cannot fold a single revision"))
1339
1339
1340 # There's no clear way to manage multiple commits with a Dxxx tag, so
1340 # There's no clear way to manage multiple commits with a Dxxx tag, so
1341 # require the amend option. (We could append "_nnn", but then it
1341 # require the amend option. (We could append "_nnn", but then it
1342 # becomes jumbled if earlier commits are added to an update.) It should
1342 # becomes jumbled if earlier commits are added to an update.) It should
1343 # lock the repo and ensure that the range is editable, but that would
1343 # lock the repo and ensure that the range is editable, but that would
1344 # make the code pretty convoluted. The default behavior of `arc` is to
1344 # make the code pretty convoluted. The default behavior of `arc` is to
1345 # create a new review anyway.
1345 # create a new review anyway.
1346 if not opts.get(b"amend"):
1346 if not opts.get(b"amend"):
1347 raise error.Abort(_(b"cannot fold with --no-amend"))
1347 raise error.Abort(_(b"cannot fold with --no-amend"))
1348
1348
1349 # It might be possible to bucketize the revisions by the DREV value, and
1349 # It might be possible to bucketize the revisions by the DREV value, and
1350 # iterate over those groups when posting, and then again when amending.
1350 # iterate over those groups when posting, and then again when amending.
1351 # But for simplicity, require all selected revisions to be for the same
1351 # But for simplicity, require all selected revisions to be for the same
1352 # DREV (if present). Adding local revisions to an existing DREV is
1352 # DREV (if present). Adding local revisions to an existing DREV is
1353 # acceptable.
1353 # acceptable.
1354 drevmatchers = [
1354 drevmatchers = [
1355 _differentialrevisiondescre.search(ctx.description())
1355 _differentialrevisiondescre.search(ctx.description())
1356 for ctx in ctxs
1356 for ctx in ctxs
1357 ]
1357 ]
1358 if len({m.group('url') for m in drevmatchers if m}) > 1:
1358 if len({m.group('url') for m in drevmatchers if m}) > 1:
1359 raise error.Abort(
1359 raise error.Abort(
1360 _(b"cannot fold revisions with different DREV values")
1360 _(b"cannot fold revisions with different DREV values")
1361 )
1361 )
1362
1362
1363 # {newnode: (oldnode, olddiff, olddrev}
1363 # {newnode: (oldnode, olddiff, olddrev}
1364 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1364 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1365
1365
1366 confirm = ui.configbool(b'phabsend', b'confirm')
1366 confirm = ui.configbool(b'phabsend', b'confirm')
1367 confirm |= bool(opts.get(b'confirm'))
1367 confirm |= bool(opts.get(b'confirm'))
1368 if confirm:
1368 if confirm:
1369 confirmed = _confirmbeforesend(repo, revs, oldmap)
1369 confirmed = _confirmbeforesend(repo, revs, oldmap)
1370 if not confirmed:
1370 if not confirmed:
1371 raise error.Abort(_(b'phabsend cancelled'))
1371 raise error.Abort(_(b'phabsend cancelled'))
1372
1372
1373 actions = []
1373 actions = []
1374 reviewers = opts.get(b'reviewer', [])
1374 reviewers = opts.get(b'reviewer', [])
1375 blockers = opts.get(b'blocker', [])
1375 blockers = opts.get(b'blocker', [])
1376 phids = []
1376 phids = []
1377 if reviewers:
1377 if reviewers:
1378 phids.extend(userphids(repo.ui, reviewers))
1378 phids.extend(userphids(repo.ui, reviewers))
1379 if blockers:
1379 if blockers:
1380 phids.extend(
1380 phids.extend(
1381 map(
1381 map(
1382 lambda phid: b'blocking(%s)' % phid,
1382 lambda phid: b'blocking(%s)' % phid,
1383 userphids(repo.ui, blockers),
1383 userphids(repo.ui, blockers),
1384 )
1384 )
1385 )
1385 )
1386 if phids:
1386 if phids:
1387 actions.append({b'type': b'reviewers.add', b'value': phids})
1387 actions.append({b'type': b'reviewers.add', b'value': phids})
1388
1388
1389 drevids = [] # [int]
1389 drevids = [] # [int]
1390 diffmap = {} # {newnode: diff}
1390 diffmap = {} # {newnode: diff}
1391
1391
1392 # Send patches one by one so we know their Differential Revision PHIDs and
1392 # Send patches one by one so we know their Differential Revision PHIDs and
1393 # can provide dependency relationship
1393 # can provide dependency relationship
1394 lastrevphid = None
1394 lastrevphid = None
1395 for ctx in ctxs:
1395 for ctx in ctxs:
1396 if fold:
1396 if fold:
1397 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1397 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1398 else:
1398 else:
1399 ui.debug(b'sending rev %d\n' % ctx.rev())
1399 ui.debug(b'sending rev %d\n' % ctx.rev())
1400
1400
1401 # Get Differential Revision ID
1401 # Get Differential Revision ID
1402 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1402 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1403 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1403 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1404
1404
1405 if fold:
1405 if fold:
1406 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1406 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1407 ctxs[-1].node(), (None, None, None)
1407 ctxs[-1].node(), (None, None, None)
1408 )
1408 )
1409
1409
1410 if oldnode != ctx.node() or opts.get(b'amend'):
1410 if oldnode != ctx.node() or opts.get(b'amend'):
1411 # Create or update Differential Revision
1411 # Create or update Differential Revision
1412 revision, diff = createdifferentialrevision(
1412 revision, diff = createdifferentialrevision(
1413 ctxs if fold else [ctx],
1413 ctxs if fold else [ctx],
1414 revid,
1414 revid,
1415 lastrevphid,
1415 lastrevphid,
1416 oldbasenode,
1416 oldbasenode,
1417 oldnode,
1417 oldnode,
1418 olddiff,
1418 olddiff,
1419 actions,
1419 actions,
1420 opts.get(b'comment'),
1420 opts.get(b'comment'),
1421 )
1421 )
1422
1422
1423 if fold:
1423 if fold:
1424 for ctx in ctxs:
1424 for ctx in ctxs:
1425 diffmap[ctx.node()] = diff
1425 diffmap[ctx.node()] = diff
1426 else:
1426 else:
1427 diffmap[ctx.node()] = diff
1427 diffmap[ctx.node()] = diff
1428
1428
1429 newrevid = int(revision[b'object'][b'id'])
1429 newrevid = int(revision[b'object'][b'id'])
1430 newrevphid = revision[b'object'][b'phid']
1430 newrevphid = revision[b'object'][b'phid']
1431 if revid:
1431 if revid:
1432 action = b'updated'
1432 action = b'updated'
1433 else:
1433 else:
1434 action = b'created'
1434 action = b'created'
1435
1435
1436 # Create a local tag to note the association, if commit message
1436 # Create a local tag to note the association, if commit message
1437 # does not have it already
1437 # does not have it already
1438 if not fold:
1438 if not fold:
1439 m = _differentialrevisiondescre.search(ctx.description())
1439 m = _differentialrevisiondescre.search(ctx.description())
1440 if not m or int(m.group('id')) != newrevid:
1440 if not m or int(m.group('id')) != newrevid:
1441 tagname = b'D%d' % newrevid
1441 tagname = b'D%d' % newrevid
1442 tags.tag(
1442 tags.tag(
1443 repo,
1443 repo,
1444 tagname,
1444 tagname,
1445 ctx.node(),
1445 ctx.node(),
1446 message=None,
1446 message=None,
1447 user=None,
1447 user=None,
1448 date=None,
1448 date=None,
1449 local=True,
1449 local=True,
1450 )
1450 )
1451 else:
1451 else:
1452 # Nothing changed. But still set "newrevphid" so the next revision
1452 # Nothing changed. But still set "newrevphid" so the next revision
1453 # could depend on this one and "newrevid" for the summary line.
1453 # could depend on this one and "newrevid" for the summary line.
1454 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1454 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1455 newrevid = revid
1455 newrevid = revid
1456 action = b'skipped'
1456 action = b'skipped'
1457
1457
1458 drevids.append(newrevid)
1458 drevids.append(newrevid)
1459 lastrevphid = newrevphid
1459 lastrevphid = newrevphid
1460
1460
1461 if fold:
1461 if fold:
1462 for c in ctxs:
1462 for c in ctxs:
1463 if oldmap.get(c.node(), (None, None, None))[2]:
1463 if oldmap.get(c.node(), (None, None, None))[2]:
1464 action = b'updated'
1464 action = b'updated'
1465 else:
1465 else:
1466 action = b'created'
1466 action = b'created'
1467 _print_phabsend_action(ui, c, newrevid, action)
1467 _print_phabsend_action(ui, c, newrevid, action)
1468 break
1468 break
1469
1469
1470 _print_phabsend_action(ui, ctx, newrevid, action)
1470 _print_phabsend_action(ui, ctx, newrevid, action)
1471
1471
1472 # Update commit messages and remove tags
1472 # Update commit messages and remove tags
1473 if opts.get(b'amend'):
1473 if opts.get(b'amend'):
1474 unfi = repo.unfiltered()
1474 unfi = repo.unfiltered()
1475 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1475 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1476 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1476 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1477 # Eagerly evaluate commits to restabilize before creating new
1477 # Eagerly evaluate commits to restabilize before creating new
1478 # commits. The selected revisions are excluded because they are
1478 # commits. The selected revisions are excluded because they are
1479 # automatically restacked as part of the submission process.
1479 # automatically restacked as part of the submission process.
1480 restack = [
1480 restack = [
1481 c
1481 c
1482 for c in repo.set(
1482 for c in repo.set(
1483 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1483 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1484 revs,
1484 revs,
1485 revs,
1485 revs,
1486 )
1486 )
1487 ]
1487 ]
1488 wnode = unfi[b'.'].node()
1488 wnode = unfi[b'.'].node()
1489 mapping = {} # {oldnode: [newnode]}
1489 mapping = {} # {oldnode: [newnode]}
1490 newnodes = []
1490 newnodes = []
1491
1491
1492 drevid = drevids[0]
1492 drevid = drevids[0]
1493
1493
1494 for i, rev in enumerate(revs):
1494 for i, rev in enumerate(revs):
1495 old = unfi[rev]
1495 old = unfi[rev]
1496 if not fold:
1496 if not fold:
1497 drevid = drevids[i]
1497 drevid = drevids[i]
1498 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1498 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1499
1499
1500 newdesc = get_amended_desc(drev, old, fold)
1500 newdesc = get_amended_desc(drev, old, fold)
1501 # Make sure commit message contain "Differential Revision"
1501 # Make sure commit message contain "Differential Revision"
1502 if (
1502 if (
1503 old.description() != newdesc
1503 old.description() != newdesc
1504 or old.p1().node() in mapping
1504 or old.p1().node() in mapping
1505 or old.p2().node() in mapping
1505 or old.p2().node() in mapping
1506 ):
1506 ):
1507 if old.phase() == phases.public:
1507 if old.phase() == phases.public:
1508 ui.warn(
1508 ui.warn(
1509 _(b"warning: not updating public commit %s\n")
1509 _(b"warning: not updating public commit %s\n")
1510 % scmutil.formatchangeid(old)
1510 % scmutil.formatchangeid(old)
1511 )
1511 )
1512 continue
1512 continue
1513 parents = [
1513 parents = [
1514 mapping.get(old.p1().node(), (old.p1(),))[0],
1514 mapping.get(old.p1().node(), (old.p1(),))[0],
1515 mapping.get(old.p2().node(), (old.p2(),))[0],
1515 mapping.get(old.p2().node(), (old.p2(),))[0],
1516 ]
1516 ]
1517 newdesc = rewriteutil.update_hash_refs(
1517 newdesc = rewriteutil.update_hash_refs(
1518 repo, newdesc, mapping,
1518 repo, newdesc, mapping,
1519 )
1519 )
1520 new = context.metadataonlyctx(
1520 new = context.metadataonlyctx(
1521 repo,
1521 repo,
1522 old,
1522 old,
1523 parents=parents,
1523 parents=parents,
1524 text=newdesc,
1524 text=newdesc,
1525 user=old.user(),
1525 user=old.user(),
1526 date=old.date(),
1526 date=old.date(),
1527 extra=old.extra(),
1527 extra=old.extra(),
1528 )
1528 )
1529
1529
1530 newnode = new.commit()
1530 newnode = new.commit()
1531
1531
1532 mapping[old.node()] = [newnode]
1532 mapping[old.node()] = [newnode]
1533
1533
1534 if fold:
1534 if fold:
1535 # Defer updating the (single) Diff until all nodes are
1535 # Defer updating the (single) Diff until all nodes are
1536 # collected. No tags were created, so none need to be
1536 # collected. No tags were created, so none need to be
1537 # removed.
1537 # removed.
1538 newnodes.append(newnode)
1538 newnodes.append(newnode)
1539 continue
1539 continue
1540
1540
1541 _amend_diff_properties(
1541 _amend_diff_properties(
1542 unfi, drevid, [newnode], diffmap[old.node()]
1542 unfi, drevid, [newnode], diffmap[old.node()]
1543 )
1543 )
1544
1544
1545 # Remove local tags since it's no longer necessary
1545 # Remove local tags since it's no longer necessary
1546 tagname = b'D%d' % drevid
1546 tagname = b'D%d' % drevid
1547 if tagname in repo.tags():
1547 if tagname in repo.tags():
1548 tags.tag(
1548 tags.tag(
1549 repo,
1549 repo,
1550 tagname,
1550 tagname,
1551 nullid,
1551 nullid,
1552 message=None,
1552 message=None,
1553 user=None,
1553 user=None,
1554 date=None,
1554 date=None,
1555 local=True,
1555 local=True,
1556 )
1556 )
1557 elif fold:
1557 elif fold:
1558 # When folding multiple commits into one review with
1558 # When folding multiple commits into one review with
1559 # --fold, track even the commits that weren't amended, so
1559 # --fold, track even the commits that weren't amended, so
1560 # that their association isn't lost if the properties are
1560 # that their association isn't lost if the properties are
1561 # rewritten below.
1561 # rewritten below.
1562 newnodes.append(old.node())
1562 newnodes.append(old.node())
1563
1563
1564 # If the submitted commits are public, no amend takes place so
1564 # If the submitted commits are public, no amend takes place so
1565 # there are no newnodes and therefore no diff update to do.
1565 # there are no newnodes and therefore no diff update to do.
1566 if fold and newnodes:
1566 if fold and newnodes:
1567 diff = diffmap[old.node()]
1567 diff = diffmap[old.node()]
1568
1568
1569 # The diff object in diffmap doesn't have the local commits
1569 # The diff object in diffmap doesn't have the local commits
1570 # because that could be returned from differential.creatediff,
1570 # because that could be returned from differential.creatediff,
1571 # not differential.querydiffs. So use the queried diff (if
1571 # not differential.querydiffs. So use the queried diff (if
1572 # present), or force the amend (a new revision is being posted.)
1572 # present), or force the amend (a new revision is being posted.)
1573 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1573 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1574 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1574 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1575 _amend_diff_properties(unfi, drevid, newnodes, diff)
1575 _amend_diff_properties(unfi, drevid, newnodes, diff)
1576 else:
1576 else:
1577 _debug(
1577 _debug(
1578 ui,
1578 ui,
1579 b"local commit list for D%d is already up-to-date\n"
1579 b"local commit list for D%d is already up-to-date\n"
1580 % drevid,
1580 % drevid,
1581 )
1581 )
1582 elif fold:
1582 elif fold:
1583 _debug(ui, b"no newnodes to update\n")
1583 _debug(ui, b"no newnodes to update\n")
1584
1584
1585 # Restack any children of first-time submissions that were orphaned
1585 # Restack any children of first-time submissions that were orphaned
1586 # in the process. The ctx won't report that it is an orphan until
1586 # in the process. The ctx won't report that it is an orphan until
1587 # the cleanup takes place below.
1587 # the cleanup takes place below.
1588 for old in restack:
1588 for old in restack:
1589 parents = [
1589 parents = [
1590 mapping.get(old.p1().node(), (old.p1(),))[0],
1590 mapping.get(old.p1().node(), (old.p1(),))[0],
1591 mapping.get(old.p2().node(), (old.p2(),))[0],
1591 mapping.get(old.p2().node(), (old.p2(),))[0],
1592 ]
1592 ]
1593 new = context.metadataonlyctx(
1593 new = context.metadataonlyctx(
1594 repo,
1594 repo,
1595 old,
1595 old,
1596 parents=parents,
1596 parents=parents,
1597 text=rewriteutil.update_hash_refs(
1597 text=rewriteutil.update_hash_refs(
1598 repo, old.description(), mapping
1598 repo, old.description(), mapping
1599 ),
1599 ),
1600 user=old.user(),
1600 user=old.user(),
1601 date=old.date(),
1601 date=old.date(),
1602 extra=old.extra(),
1602 extra=old.extra(),
1603 )
1603 )
1604
1604
1605 newnode = new.commit()
1605 newnode = new.commit()
1606
1606
1607 # Don't obsolete unselected descendants of nodes that have not
1607 # Don't obsolete unselected descendants of nodes that have not
1608 # been changed in this transaction- that results in an error.
1608 # been changed in this transaction- that results in an error.
1609 if newnode != old.node():
1609 if newnode != old.node():
1610 mapping[old.node()] = [newnode]
1610 mapping[old.node()] = [newnode]
1611 _debug(
1611 _debug(
1612 ui,
1612 ui,
1613 b"restabilizing %s as %s\n"
1613 b"restabilizing %s as %s\n"
1614 % (short(old.node()), short(newnode)),
1614 % (short(old.node()), short(newnode)),
1615 )
1615 )
1616 else:
1616 else:
1617 _debug(
1617 _debug(
1618 ui,
1618 ui,
1619 b"not restabilizing unchanged %s\n" % short(old.node()),
1619 b"not restabilizing unchanged %s\n" % short(old.node()),
1620 )
1620 )
1621
1621
1622 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1622 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1623 if wnode in mapping:
1623 if wnode in mapping:
1624 unfi.setparents(mapping[wnode][0])
1624 unfi.setparents(mapping[wnode][0])
1625
1625
1626
1626
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): the trailing space looks intentional -- presumably to
        # match the "# Parent " header format; confirm against export output.
        (b'parent', b'Parent '),
    ]
)
1638
1638
1639
1639
def _confirmbeforesend(repo, revs, oldmap):
    """Display a one-line summary per selected revision and ask the user
    whether to proceed.

    Returns True when the user confirms sending the changes to Phabricator,
    False otherwise.
    """
    # Resolving the URL/token up front also surfaces configuration errors
    # before anything is printed.
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # The first line of the commit message serves as the summary.
        summary = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        drevdesc = (
            ui.label(b'D%d' % drevid, b'phabricator.drev')
            if drevid
            else ui.label(_(b'NEW'), b'phabricator.drev')
        )
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(summary, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns the zero-based index of the chosen answer, so a
    # falsy result means "Yes".
    confirmed = not ui.promptchoice(
        _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    )
    return confirmed
1667
1667
1668
1668
# Status names (in the normalized form produced by ``_getstatusname``) that
# may appear as bare symbols in the DREVSPEC query language; see the ``walk``
# helper inside ``querydrev``.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1677
1677
1678
1678
1679 def _getstatusname(drev):
1679 def _getstatusname(drev):
1680 """get normalized status name from a Differential Revision"""
1680 """get normalized status name from a Differential Revision"""
1681 return drev[b'statusName'].replace(b' ', b'').lower()
1681 return drev[b'statusName'].replace(b' ', b'').lower()
1682
1682
1683
1683
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    # (tuple layout follows the ``parser.parser`` conventions; this table is
    # consumed by ``_parse`` below)
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1698
1698
1699
1699
def _tokenize(text):
    """Lex a DREVSPEC bytestring into (token-type, value, position) tuples.

    Symbols are maximal runs of non-special characters and are yielded as
    ``(b'symbol', value, pos)``.  Each special character except space is
    yielded as its own token with a None value; spaces are skipped.  A final
    ``(b'end', None, pos)`` token marks the end of input.
    """
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Scan forward to the end of the current symbol, i.e. the longest
        # run of characters that are not in the special set.
        end = pos
        while end < length and text[end : end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:
            # A special character: spaces are silently skipped, everything
            # else becomes a single-character token.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1719
1719
1720
1720
def _parse(text):
    """Parse a DREVSPEC bytestring into a parse tree.

    Raises ParseError when the parser stops before consuming all input.
    """
    parsed, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        # Trailing garbage after the last valid token.
        raise error.ParseError(b'invalid token', pos)
    return parsed
1726
1726
1727
1727
1728 def _parsedrev(symbol):
1728 def _parsedrev(symbol):
1729 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1729 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1730 if symbol.startswith(b'D') and symbol[1:].isdigit():
1730 if symbol.startswith(b'D') and symbol[1:].isdigit():
1731 return int(symbol[1:])
1731 return int(symbol[1:])
1732 if symbol.isdigit():
1732 if symbol.isdigit():
1733 return int(symbol)
1733 return int(symbol)
1734
1734
1735
1735
def _prefetchdrevs(tree):
    """Return ({single-drev-id}, {ancestor-drev-id}) referenced by a tree.

    Walks the parse tree produced by ``_parse`` and collects every revision
    number mentioned, plus the subset whose ancestors are requested via the
    ``:`` operator.
    """
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        num = _parsedrev(tree[1])
        if num:
            singles.add(num)
    elif kind == b'ancestors':
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles.update(sub_singles)
        # Every revision named under ":" needs its ancestors fetched too.
        ancestors.update(sub_singles)
        ancestors.update(sub_ancestors)
    else:
        # Binary operators and groups: recurse into every child.
        for child in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(child)
            singles.update(sub_singles)
            ancestors.update(sub_ancestors)
    return singles, ancestors
1756
1756
1757
1757
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    # Fetch a single drev by id or phid, consulting (and filling) the
    # ``prefetched`` cache defined below so each drev is queried at most once.
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    # Follow "phabricator:depends-on" links downward from the given tops and
    # return the stack bottom-first.
    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Include up to ``batchsize`` predecessor ids of each ancestor query
        # so the stack walk in getstack() mostly hits the cache.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # A status symbol selects all prefetched revisions whose
                # normalized status matches.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set operations map directly onto the operator module.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1883
1883
1884
1884
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped so we don't emit doubled blank lines.
    return b'\n\n'.join(part for part in parts if part)
1898
1898
1899
1899
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    description = ctx.description()

    # With --fold, several commit messages were combined on the Phabricator
    # side, so the fields cannot simply be read back here -- doing so would
    # give every affected local revision the same commit message once the URI
    # is amended in.  Instead only the Differential Revision line is appended,
    # or updated in place if one already exists.  At worst this means commit
    # message or test plan edits made on Phabricator aren't propagated back,
    # which seems reasonable when local commits are effectively combined in
    # one review.
    if _differentialrevisiondescre.search(description):
        return _differentialrevisiondescre.sub(uri, description)

    return b'\n\n'.join([description, uri])
1925
1925
1926
1926
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    commits = properties.get(b'local:commits') or {}
    if len(commits) > 1:
        # Keys of "local:commits" are hex nodes; convert them to binary.
        return {bin(node) for node in commits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.  An absent node yields {None}.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1940
1940
1941
1941
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "branch": "default",
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "authorEmail": "foo@example.com"
          "branch": "default",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "message": "...",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "summary": "...",
          "tag": "",
          "time": 1499546314,
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        if props.get(b'local:commits'):
            # NOTE(review): sorting a list of dicts raises TypeError on
            # Python 3 when there is more than one entry -- presumably
            # "local:commits" carries a single commit here; confirm.
            commit = sorted(props[b'local:commits'].values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone information is not available; assume offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if len(parents) >= 1:
                meta[b'parent'] = parents[0]
    # Fill any still-missing fields from the top-level diff object.  When
    # "hg:meta" was present, this updates that stored dict in place.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2009
2009
2010
2010
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _formatspec(s):
            # With --stack, pull in the ancestors of each spec via ":".
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        # Join the individual specs into one union query.
        joined = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, joined)
        if drevs:
            return drevs

    # Either no specs were given or the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
2030
2030
2031
2031
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    ``write`` receives a list of (DREV, bytes) pairs, where DREV is the
    differential number (as bytes, without the "D" prefix) and the bytes
    are the patch text to be imported.  ``drevs`` is what "querydrev"
    returns: results of "differential.query".
    """
    # Prefetch the hg:meta property of every latest diff with one call.
    latestdiffids = sorted(
        {max(int(v) for v in drev[b'diffs']) for drev in drevs}
    )
    alldiffs = callconduit(
        ui, b'differential.querydiffs', {b'ids': latestdiffids}
    )

    patches = []

    # Build one importable patch per Differential Revision.
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Preserve metadata from the hg:meta property as patch headers that
        # "import" understands (see patchheadermap and extract in
        # mercurial/patch.py for the supported headers).
        meta = getdiffmeta(alldiffs[b'%d' % diffid])
        for metakey, headername in _metanamemap.items():
            if metakey in meta:
                header += b'# %s %s\n' % (headername, meta[metakey])

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand every generated patch to the supplied callback.
    write(patches)
2068
2068
2069
2069
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Print each patch body; the drev id part of the pair is not needed
        # when writing straight to the output.
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
2106
2106
2107
2107
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _apply(patches):
        # Start applying on top of the working directory parent(s); each
        # successfully imported node becomes the parent of the next patch.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drevid, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drevid)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drevid)

                    ui.note(msg + b'\n')
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
2167
2167
2168
2168
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Conduit transaction types that map 1:1 onto command flags.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # Attach the comment (if any) only to the final revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2224
2224
2225
2225
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line in the description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )
    # Otherwise fall back to a local "D<number>" tag on the node.
    for tag in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(tag):
            baseurl = ctx.repo().ui.config(b'phabricator', b'url')
            if not baseurl.endswith(b'/'):
                baseurl += b'/'
            return templateutil.hybriddict(
                {b'url': baseurl + tag, b'id': tag}
            )
    return None
2248
2248
2249
2249
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # NOTE(review): getdrevmap() appears to map revs that have no associated
    # Differential Revision to None; without this guard we would send a
    # bogus {b'ids': [None]} query to the server — confirm against
    # getdrevmap()'s contract.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
2270
2270
2271
2271
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Split revs into those with a known Differential Revision id and the
    # rest; remember which revs map to each id so results can be fanned out.
    unknownrevs = []
    drevids = set()
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One batched query for every known id, then index the results by rev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revs we found status for.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now