##// END OF EJS Templates
phabricator: color the status in the "phabstatus" view...
Matt Harbison -
r44310:b0867b77 default
parent child Browse files
Show More
@@ -1,1746 +1,1756 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 encoding,
64 encoding,
65 error,
65 error,
66 exthelper,
66 exthelper,
67 graphmod,
67 graphmod,
68 httpconnection as httpconnectionmod,
68 httpconnection as httpconnectionmod,
69 logcmdutil,
69 logcmdutil,
70 match,
70 match,
71 mdiff,
71 mdiff,
72 obsutil,
72 obsutil,
73 parser,
73 parser,
74 patch,
74 patch,
75 phases,
75 phases,
76 pycompat,
76 pycompat,
77 scmutil,
77 scmutil,
78 smartset,
78 smartset,
79 tags,
79 tags,
80 templatefilters,
80 templatefilters,
81 templateutil,
81 templateutil,
82 url as urlmod,
82 url as urlmod,
83 util,
83 util,
84 )
84 )
85 from mercurial.utils import (
85 from mercurial.utils import (
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89 from . import show
89 from . import show
90
90
91
91
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
125
125
# Labels -> color/effect mapping used by phabsend output and the
# "phabstatus" view of :hg:`show`.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    # Differential revision statuses, colored in the "phabstatus" view
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
134
140
# Extra command-line flag appended to every @vcrcommand so the test suite
# can record/replay Conduit HTTP traffic through the `vcr` library.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
147
153
148
154
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Decorator factory like ``command()``, adding a ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is given, HTTP traffic is recorded to (or
    replayed from) the cassette at PATH via the ``vcr`` library; otherwise
    the wrapped command runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI/method agree and the form bodies are
        # semantically equal.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub real conduit API tokens before they hit the cassette file.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Never record server session cookies.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does dynamic imports that fight demandimport.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
225
231
226
232
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # Recursively flatten lists/dicts into PHP-style bracketed keys.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
252
258
253
259
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Look up an [auth] block whose prefix/schemes match the conduit URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
282
288
283
289
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl pipeline; feed the form body on stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library path.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert every unicode string in the decoded JSON back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
327
333
328
334
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
352
358
353
359
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the looked-up PHID in the in-memory config for later calls.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
373
379
374
380
375 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
381 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
376 _differentialrevisiondescre = re.compile(
382 _differentialrevisiondescre = re.compile(
377 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
383 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
378 )
384 )
379
385
380
386
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
474
480
475
481
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result
497
503
498
504
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()
507
513
508
514
class DiffChangeType(object):
    """Differential per-file change-type constants (Conduit API values)."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
518
524
519
525
class DiffFileType(object):
    """Differential file-type constants (Conduit API values)."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
524
530
525
531
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
539
545
540
546
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff.  Each one represents one file in a diff.

    camelCase attribute names are serialized directly into the conduit
    payload (marked ``camelcase-required``).
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """mirror every ``new:``-prefixed metadata key under ``old:``"""
        for key in list(self.metadata.keys()):
            oldkey = key.replace(b'new:', b'old:')
            self.metadata[oldkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode for this change"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode for this change"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk, folding its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
580
586
581
587
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    camelCase attribute names are serialized directly into the conduit
    payload (marked ``camelcase-required``).
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange, keyed by the file's current path"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        serialized = pycompat.byteskwargs(attr.asdict(change))
        self.changes[change.currentPath] = serialized
608
614
609
615
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs the file against ctx.p1() with full context (32767 lines) and
    appends one phabhunk per hunk to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # lines[0] is the "@@ ... @@" hunk header; the corpus is the rest.
        corpus = b''.join(lines[1:])
        statlines = list(header) + lines
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
638
644
639
645
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # fctx.data() is deterministic for a given fctx; fetch it once.
    filedata = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # The server may already have some chunks from an earlier,
            # interrupted upload.
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': start,
                    b'data': base64.b64encode(filedata[start:end]),
                    b'dataEncoding': b'base64',
                },
            )
665
671
666
672
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the uploaded file; aborts if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID with upload=True means the server wants chunked upload.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
702
708
703
709
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Content unchanged; nothing to upload.  If it's left as IMAGE/BINARY
        # web UI might try to display it, so mark it TEXT instead.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
725
731
726
732
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get a dedicated file type so the web UI can render them.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
739
745
740
746
# Copied from mercurial/patch.py
# Maps a Mercurial file flag (b'l' symlink, b'x' executable, b'' regular
# file) to the corresponding git-style mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
743
749
744
750
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Checks both the current revision and (if any) its first parent, since
    either side of the diff being non-UTF-8 forces binary handling.
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
760
766
761
767
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        fctx = ctx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[fctx.flags()])
        # Binary / non-UTF-8 removals carry no text hunks.
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
774
780
775
781
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only send mode properties when the mode actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
794
800
795
801
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — a rename's source is taken out of
    it so addremoved() won't also report it as a plain deletion.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source was deleted in this commit: this is a move.  The
                # source gets its own MOVE_AWAY change pointing at us.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Source already moved elsewhere too: upgrade it to MULTICOPY
                # and record this destination as another away-path.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Flush the accumulated source-side changes after all adds are processed,
    # so MULTICOPY upgrades above are reflected in what gets sent.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
859
865
860
866
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff from the status of ``ctx`` against its first parent
    and sends it via the "differential.creatediff" conduit API.  Returns
    the conduit response dict; aborts if the call returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
886
892
887
893
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Sets two custom properties on the Differential diff via
    "differential.setdiffproperty": ``hg:meta`` (commit metadata) and
    ``local:commits`` (per-node author/date/parent info).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
924
930
925
931
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit responses; aborts when
    the revision edit returns nothing.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs to decide whether a new Differential
        # diff upload is actually needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
995
1001
996
1002
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts if any requested name is unknown to the server, since
    ``user.search`` silently ignores unmatched usernames.
    """
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1012
1018
1013
1019
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

      [phabsend]
      confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line, or [phabsend] confirm in the config,
    # triggers an interactive confirmation before anything is sent.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the "reviewers.add" transaction shared by every revision.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" marker understood by
        # differential.revision.edit.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per revision, colored via the phabricator.* labels.
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1221
1227
1222
1228
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    (
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    )
)
1234
1240
1235
1241
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and ask the user to confirm.

    Prints one labeled line per revision (existing D-number or NEW), then
    prompts. Returns True when the user answers yes, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # First line of the commit message only.
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        drevdesc = (
            ui.label(b'D%d' % drevid, b'phabricator.drev')
            if drevid
            else ui.label(_(b'NEW'), b'phabricator.drev')
        )

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns 0 for "Yes", so a truthy result means "No".
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1263
1269
1264
1270
1265 _knownstatusnames = {
1271 _knownstatusnames = {
1266 b'accepted',
1272 b'accepted',
1267 b'needsreview',
1273 b'needsreview',
1268 b'needsrevision',
1274 b'needsrevision',
1269 b'closed',
1275 b'closed',
1270 b'abandoned',
1276 b'abandoned',
1271 b'changesplanned',
1277 b'changesplanned',
1272 }
1278 }
1273
1279
1274
1280
1275 def _getstatusname(drev):
1281 def _getstatusname(drev):
1276 """get normalized status name from a Differential Revision"""
1282 """get normalized status name from a Differential Revision"""
1277 return drev[b'statusName'].replace(b' ', b'').lower()
1283 return drev[b'statusName'].replace(b' ', b'').lower()
1278
1284
1279
1285
1280 # Small language to specify differential revisions. Support symbols: (), :X,
1286 # Small language to specify differential revisions. Support symbols: (), :X,
1281 # +, and -.
1287 # +, and -.
1282
1288
1283 _elements = {
1289 _elements = {
1284 # token-type: binding-strength, primary, prefix, infix, suffix
1290 # token-type: binding-strength, primary, prefix, infix, suffix
1285 b'(': (12, None, (b'group', 1, b')'), None, None),
1291 b'(': (12, None, (b'group', 1, b')'), None, None),
1286 b':': (8, None, (b'ancestors', 8), None, None),
1292 b':': (8, None, (b'ancestors', 8), None, None),
1287 b'&': (5, None, None, (b'and_', 5), None),
1293 b'&': (5, None, None, (b'and_', 5), None),
1288 b'+': (4, None, None, (b'add', 4), None),
1294 b'+': (4, None, None, (b'add', 4), None),
1289 b'-': (4, None, None, (b'sub', 4), None),
1295 b'-': (4, None, None, (b'sub', 4), None),
1290 b')': (0, None, None, None, None),
1296 b')': (0, None, None, None, None),
1291 b'symbol': (0, b'symbol', None, None, None),
1297 b'symbol': (0, b'symbol', None, None, None),
1292 b'end': (0, None, None, None, None),
1298 b'end': (0, None, None, None, None),
1293 }
1299 }
1294
1300
1295
1301
def _tokenize(text):
    """Yield (token-type, value, position) triples for a drev spec string.

    Symbols are maximal runs of non-special bytes; single special characters
    are yielded as their own token type, spaces are skipped, and a final
    (b'end', None, pos) token terminates the stream.
    """
    buf = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    idx = 0
    end = len(text)
    while idx < end:
        # Collect the longest run of non-special bytes starting at idx.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(buf[idx:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, idx)
            idx += len(symbol)
        else:
            # Special char; spaces are dropped, everything else is a token.
            if text[idx : idx + 1] != b' ':
                yield (text[idx : idx + 1], None, idx)
            idx += 1
    yield (b'end', None, idx)
1315
1321
1316
1322
def _parse(text):
    """Parse a drev spec string into a tree using the _elements grammar.

    Raises ParseError when the parser stops before consuming all of text.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1322
1328
1323
1329
1324 def _parsedrev(symbol):
1330 def _parsedrev(symbol):
1325 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1331 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1326 if symbol.startswith(b'D') and symbol[1:].isdigit():
1332 if symbol.startswith(b'D') and symbol[1:].isdigit():
1327 return int(symbol[1:])
1333 return int(symbol[1:])
1328 if symbol.isdigit():
1334 if symbol.isdigit():
1329 return int(symbol)
1335 return int(symbol)
1330
1336
1331
1337
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        # A bare symbol may be a drev number; non-numeric symbols (status
        # names) contribute nothing to the prefetch sets.
        num = _parsedrev(tree[1])
        if num:
            singles.add(num)
    elif op == b'ancestors':
        # ":X" — X itself and everything X contributes become ancestor roots.
        sub_singles, sub_ancestors = _prefetchdrevs(tree[1])
        singles |= sub_singles
        ancestors |= sub_singles
        ancestors |= sub_ancestors
    else:
        # Binary/group operators: union over all operands.
        for operand in tree[1:]:
            sub_singles, sub_ancestors = _prefetchdrevs(operand)
            singles |= sub_singles
            ancestors |= sub_ancestors
    return singles, ancestors
1352
1358
1353
1359
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by phid and by id so
        # later lookups of either form hit the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges downward through the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbol: select prefetched drevs by status.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Map b'and_'/b'add'/b'sub' straight onto operator.and_/add/sub.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1476
1482
1477
1483
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    # Empty sections are dropped; remaining ones are blank-line separated.
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    return b'\n\n'.join(part for part in parts if part)
1491
1497
1492
1498
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # NOTE(review): sorted() over dict values only avoids comparing
            # dicts when there is a single commit — confirm arc never sends
            # several entries here.
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is lost; pretend UTC ("<secs> 0").
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1559
1565
1560
1566
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs (latest diff of each drev).
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for key in _metanamemap.keys():
            if key in meta:
                header += b'# %s %s\n' % (_metanamemap[key], meta[key])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
1592
1598
1593
1599
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    # --stack is sugar for wrapping the spec in the ":" (stack) operator.
    if pycompat.byteskwargs(opts).get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1624
1630
1625
1631
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # One transaction per requested status change (at most one flag survives
    # the mutual-exclusion check above).
    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = querydrev(repo, spec)
    comment = opts.get(b'comment')
    for i, drev in enumerate(drevs):
        # A comment, if given, is attached to the last revision only.
        if comment and i + 1 == len(drevs):
            actions.append({b'type': b'comment', b'value': comment})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1662
1668
1663
1669
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: the Differential Revision URL embedded in the
    # changeset description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group('url'), b'id': b"D%s" % match.group('id')}
        )

    # Otherwise fall back to the first tag matching the differential
    # revision tag pattern, joined onto the configured Phabricator URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + t, b'id': t})
    return None
1686
1692
1687
1693
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None

    # Query Phabricator for this revision and pick out the matching drev.
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    drev = next((d for d in drevs if int(d[b'id']) == drevid), None)
    if drev is None:
        return None
    return templateutil.hybriddict(
        {b'url': drev[b'uri'], b'status': drev[b'statusName']}
    )
1708
1714
1709
1715
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # Unfinished ("underway") changesets, in topological order.
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # set() is the idiomatic empty set (was set([]), flagged by C405).
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            # No associated Differential Revision; excluded from the graph.
            unknownrevs.append(rev)

    # Fetch all referenced drevs in one conduit call, then index them by
    # local revision for the per-changeset hook below.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Extra output per changeset: the drev URI and its status name,
        # colored via the phabricator.status.* labels.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now