##// END OF EJS Templates
phabricator: use the `http.timeout` config for conduit call...
marmoute -
r46584:4d70444c default
parent child Browse files
Show More
@@ -1,2368 +1,2377
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 # retry a failed command N times (default 0). Useful when using the extension
39 39 # over a flaky connection.
40 40 #
41 41 # We wait `retry.interval` between each retry, in seconds.
42 42 # (default 1 second).
43 43 retry = 3
44 44 retry.interval = 10
45 45
46 # the retry option can combine well with the http.timeout one.
47 #
48 # For example to give up on http request after 20 seconds:
49 [http]
50 timeout=20
51
46 52 [auth]
47 53 example.schemes = https
48 54 example.prefix = phab.example.com
49 55
50 56 # API token. Get it from https://$HOST/conduit/login/
51 57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
52 58 """
53 59
54 60 from __future__ import absolute_import
55 61
56 62 import base64
57 63 import contextlib
58 64 import hashlib
59 65 import itertools
60 66 import json
61 67 import mimetypes
62 68 import operator
63 69 import re
64 70 import time
65 71
66 72 from mercurial.node import bin, nullid, short
67 73 from mercurial.i18n import _
68 74 from mercurial.pycompat import getattr
69 75 from mercurial.thirdparty import attr
70 76 from mercurial import (
71 77 cmdutil,
72 78 context,
73 79 copies,
74 80 encoding,
75 81 error,
76 82 exthelper,
77 83 graphmod,
78 84 httpconnection as httpconnectionmod,
79 85 localrepo,
80 86 logcmdutil,
81 87 match,
82 88 mdiff,
83 89 obsutil,
84 90 parser,
85 91 patch,
86 92 phases,
87 93 pycompat,
88 94 rewriteutil,
89 95 scmutil,
90 96 smartset,
91 97 tags,
92 98 templatefilters,
93 99 templateutil,
94 100 url as urlmod,
95 101 util,
96 102 )
97 103 from mercurial.utils import (
98 104 procutil,
99 105 stringutil,
100 106 )
101 107 from . import show
102 108
103 109
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper tables under the names Mercurial's extension
# loader looks for at module level.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator',
    b'batchsize',
    default=12,
)
eh.configitem(
    b'phabricator',
    b'callsign',
    default=None,
)
eh.configitem(
    b'phabricator',
    b'curlcmd',
    default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator',
    b'debug',
    default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator',
    b'repophid',
    default=None,
)
eh.configitem(
    b'phabricator',
    b'retry',
    default=0,
)
# delay (seconds) between retries of a failed Conduit request
eh.configitem(
    b'phabricator',
    b'retry.interval',
    default=1,
)
eh.configitem(
    b'phabricator',
    b'url',
    default=None,
)
eh.configitem(
    b'phabsend',
    b'confirm',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'secret',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'obsolete',
    default=False,
)

# color/label styles used when rendering Differential status in output
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# extra flag appended to every vcrcommand()-registered command; used by the
# test suite to record/replay Conduit HTTP traffic
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
202 208
203 209
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    loaded = False
    arcconfig = {}

    try:
        # json.loads only accepts unicode on 3.6+, and only ever returns
        # unicode strings, so convert at both boundaries.
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        tolocal = (
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x
        )
        arcconfig = pycompat.rapply(tolocal, pycompat.json_loads(raw))
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in this working directory; nothing to load
        pass

    overrides = util.sortdict()
    if b"repository.callsign" in arcconfig:
        overrides[(b"phabricator", b"callsign")] = arcconfig[
            b"repository.callsign"
        ]
    if b"phabricator.uri" in arcconfig:
        overrides[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Chain to the original implementation so .hg/hgrc is still loaded.
    return orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or loaded
241 247
242 248
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    # Wrapper around @command that adds the hidden --test-vcr flag so the
    # test suite can record/replay Conduit HTTP traffic (cassettes).
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match a recorded request against a live one by URI, method and
        # decoded body parameters instead of comparing raw bodies.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub real API tokens before the request is written to a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are server-specific; drop them from recordings.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if vcr:
                cassette = pycompat.fsdecode(vcr)
                import hgdemandimport

                # vcr does not play well with demandimport; load it eagerly.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped command's signature, name and help text
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
322 328
323 329
324 330 def _debug(ui, *msg, **opts):
325 331 """write debug output for Phabricator if ``phabricator.debug`` is set
326 332
327 333 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
328 334 printed with the --debug argument.
329 335 """
330 336 if ui.configbool(b"phabricator", b"debug"):
331 337 flag = ui.debugflag
332 338 try:
333 339 ui.debugflag = True
334 340 ui.write(*msg, **opts)
335 341 finally:
336 342 ui.debugflag = flag
337 343
338 344
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def walk(prefix, value):
        if isinstance(value, bool):
            # PHP form encoding spells booleans as the strings true/false
            value = b'true' if value else b'false'
        # exact-type dispatch, matching PHP's http_build_query behaviour
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            walk(b'%s[%s]' % (prefix, key) if prefix else key, item)

    walk(b'', params)
    return util.urlreq.urlencode(flat)
364 370
365 371
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
394 400
395 401
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    Requests are retried up to ``phabricator.retry`` times, sleeping
    ``phabricator.retry.interval`` seconds between attempts, and each HTTP
    request honors the ``http.timeout`` config.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        max_try = ui.configint(b'phabricator', b'retry') + 1
        timeout = ui.configwith(float, b'http', b'timeout')
        for try_count in range(max_try):
            try:
                # NOTE: the diff view retained both the old no-timeout call
                # and its replacement here; only the timeout-aware call from
                # r46584 is kept.
                with contextlib.closing(
                    urlopener.open(request, timeout=timeout)
                ) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                if try_count == max_try - 1:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (try_count + 1, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
453 462
454 463
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts unicode (from 3.6+) and only ever returns
    # unicode strings; convert at both boundaries.
    tolocal = lambda x: (
        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x
    )
    tounicode = lambda x: (
        encoding.unifromlocal(x) if isinstance(x, bytes) else x
    )
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(tounicode, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
478 487
479 488
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None
    phid = query[b'data'][0][b'phid']
    # cache the looked-up PHID for the rest of this process
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
499 508
500 509
# matches local tags of the form "D123"
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# matches the "Differential Revision: <url>D123" trailer in commit messages
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
505 514
506 515
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        # one Conduit query for all candidate Differential Revisions at once
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
632 641
633 642
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def drevid(ctx):
        # Prefer the "Differential Revision:" line in the commit message.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        # Fall back to a local tag of the form "D123".
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: drevid(repo[rev]) for rev in revs}
655 664
656 665
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    chunks = []
    for chunk, _label in patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        chunks.append(chunk)
    return b''.join(chunks)
665 674
666 675
class DiffChangeType(object):
    # Constants mirroring Phabricator's DifferentialChangeType values,
    # sent as the "type" field of each change in a diff.
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
676 685
677 686
class DiffFileType(object):
    # Constants mirroring Phabricator's file type values, sent as the
    # "fileType" field of each change in a diff.
    TEXT = 1
    IMAGE = 2
    BINARY = 3
682 691
683 692
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change"""

    # hunk ranges, as in a unified diff "@@ -old,len +new,len @@" header
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # the hunk body (diff lines without the "@@" header)
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
696 705
697 706
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # duplicate every "new:*" metadata entry under the matching "old:*" key
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # record the previous unix file mode (e.g. b'100644')
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # record the new unix file mode (e.g. b'100755')
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
737 746
738 747
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {path: serialized phabchange}, one entry per file in the diff
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
765 774
766 775
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # huge context so every hunk carries the whole file's surroundings
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # drop the "@@ ..." line; the offsets/lengths carry that information
        corpus = b''.join(lines[1:])
        statlines = list(header) + list(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
795 804
796 805
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # the server already has this chunk
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': start,
                    b'data': base64.b64encode(fctx.data()[start:end]),
                    b'dataEncoding': b'base64',
                },
            )
822 831
823 832
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # a PHID from allocate means the file must be sent in chunks
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
859 868
860 869
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    changed = not fctx or fctx.cmp(oldfctx)
    if not changed:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
884 893
885 894
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # images can be rendered instead of shown as opaque binaries
        pchange.fileType = DiffFileType.IMAGE
898 907
899 908
# Copied from mercurial/patch.py
# maps a file's flags ('l' symlink, 'x' executable, '' regular) to git modes
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
902 911
903 912
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # warn the user so the silent binary downgrade isn't a surprise
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
917 926
918 927
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # binary and non-UTF-8 files carry no text hunks
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)
        pdiff.addchange(pchange)
931 940
932 941
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for path in modified:
        fctx = ctx[path]
        oldfctx = basectx.p1()[path]
        change = phabchange(currentPath=path, oldPath=path)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            change.addoldmode(oldmode)
            change.addnewmode(newmode)

        # notutf8() prints a notice as a side effect, so the evaluation
        # order of this short-circuit chain is significant.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(change, fctx)
            addoldbinary(change, oldfctx, fctx)
        else:
            maketext(change, basectx, ctx, path)

        pdiff.addchange(change)
957 966
958 967
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    A file in ``added`` whose source path also appears in ``removed`` is
    recorded as a move; a file whose source still exists is recorded as a
    copy.  ``removed`` is mutated in place: move sources are taken out of it
    so that addremoved() does not also report them as plain deletions.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the source path: the precomputed copy map is used when
        # posting a range (basectx != ctx); otherwise fall back to the
        # per-file rename metadata.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source vanished: this is a move.  Record the MOVE_AWAY
                # half once and drop the source from ``removed``.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # An additional copy of an already-moved source upgrades the
                # source-side change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a notice as a side effect; keep the short-circuit
        # order intact.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # The source-side halves of copies and moves are emitted after all adds.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1040 1049
1041 1050
def creatediff(basectx, ctx):
    """Create a "Differential Diff" on the server covering basectx::ctx.

    Returns the response dict of the ``differential.creatediff`` conduit
    call; aborts when the server returns an empty response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded() strips move sources out of ``removed``, so it must run
    # before addremoved() to avoid double-reporting them.
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    response = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if response:
        return response
    if basectx == ctx:
        msg = _(b'cannot create diff for %s') % ctx
    else:
        msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
    raise error.Abort(msg)
1071 1080
1072 1081
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # Metadata describing the tip of the posted range.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }

    # Per-commit metadata for every commit in the range.
    localcommits = {}
    for ctx in ctxs:
        localcommits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }

    # Both properties are attached with separate conduit calls, hg:meta first.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        callconduit(
            ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )
1116 1125
1117 1126
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair, where ``revision`` is the response
    of the ``differential.revision.edit`` conduit call and ``diff`` is the
    diff the Revision now points at.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare diffs with a very large context (effectively whole files)
        # so a pure commit-message change produces an identical patch and no
        # new diff needs to be uploaded.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1236 1245
1237 1246
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API reports no error for unknown usernames; detect them by
    # comparing what was requested against what came back.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1253 1262
1254 1263
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1272 1281
1273 1282
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = [short(n) for n in newnodes]
    _debug(unfi.ui, b"new commits: %s\n" % stringutil.pprint(shortnodes))

    try:
        writediffproperties([unfi[n] for n in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1291 1300
1292 1301
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
        (b'', b'fold', False, _(b'combine the revisions into one review')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    # --- validation phase ---
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range. The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, revtype):
        badnodes = [repo[r].node() for r in revs]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option. (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.) It should
        # lock the repo and ensure that the range is editable, but that would
        # make the code pretty convoluted. The default behavior of `arc` is to
        # create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value, and
        # iterate over those groups when posting, and then again when amending.
        # But for simplicity, require all selected revisions to be for the same
        # DREV (if present). Adding local revisions to an existing DREV is
        # acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker options become a single "reviewers.add" transaction
    # shared by every revision edit below.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # --- send phase ---
    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            if fold:
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # With --fold a single review covers all commits; report each of
            # them once and stop iterating.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # --- amend phase ---
    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits. The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contain "Differential Revision"
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    newdesc = rewriteutil.update_hash_refs(
                        repo,
                        newdesc,
                        mapping,
                    )
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected. No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs. So use the queried diff (if
                # present), or force the amend (a new revision is being posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process. The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=rewriteutil.update_hash_refs(
                        repo, old.description(), mapping
                    ),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1673 1682
1674 1683
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' looks intentional to match
# the export header text exactly - confirm before "fixing" it.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1686 1695
1687 1696
def _confirmbeforesend(repo, revs, oldmap):
    """Show what would be posted for review and prompt the user.

    Each revision is listed with its existing Differential ID, or ``NEW``
    when posting would create a fresh review.  Returns True to proceed,
    False if the user declined.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (drevdesc, summary))

    # promptchoice() returns 0 for the first ("Yes") choice.
    return not ui.promptchoice(
        _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    )
1713 1722
1714 1723
# Normalized (lowercased, space-stripped) Differential Revision status names,
# matching the output of _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1723 1732
1724 1733
1725 1734 def _getstatusname(drev):
1726 1735 """get normalized status name from a Differential Revision"""
1727 1736 return drev[b'statusName'].replace(b' ', b'').lower()
1728 1737
1729 1738
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table for the mini-language, in the format consumed by
# parser.parser() (see _parse below).
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1744 1753
1745 1754
def _tokenize(text):
    """Yield (token-type, value, position) triples for the revision spec.

    Runs of non-special bytes become (b'symbol', bytes, pos) tokens; each
    special character becomes its own token with a None value; spaces are
    skipped.  A final (b'end', None, pos) token is always emitted.
    """
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Scan forward to the end of the current symbol, if any.
        end = pos
        while end < length and text[end : end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1765 1774
1766 1775
def _parse(text):
    """Parse a revision-spec string into a tree, aborting on trailing junk."""
    tokens = _tokenize(text)
    tree, pos = parser.parser(_elements).parse(tokens)
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1772 1781
1773 1782
1774 1783 def _parsedrev(symbol):
1775 1784 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1776 1785 if symbol.startswith(b'D') and symbol[1:].isdigit():
1777 1786 return int(symbol[1:])
1778 1787 if symbol.isdigit():
1779 1788 return int(symbol)
1780 1789
1781 1790
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    op = tree[0]
    if op == b'symbol':
        rev = _parsedrev(tree[1])
        return ({rev} if rev else set()), set()
    if op == b'ancestors':
        # An ancestors query needs its operand prefetched as an ancestor
        # root as well.
        singles, ancestors = _prefetchdrevs(tree[1])
        return singles, singles | ancestors
    # Any other operator: union the results of all operands.
    singles = set()
    ancestors = set()
    for subtree in tree[1:]:
        subsingles, subancestors = _prefetchdrevs(subtree)
        singles |= subsingles
        ancestors |= subancestors
    return singles, ancestors
1802 1811
1803 1812
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # The cache is keyed by both the numeric id and the PHID of every
        # drev ever fetched, so lookups by either form hit it.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # depth-first walk along "phabricator:depends-on" links; the result
        # is reversed so dependencies come before dependents
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    # For each ancestors query, speculatively fetch a contiguous id range
    # below the requested revision; batchsize bounds how far back we reach.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # a status name selects among the already-fetched revisions
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # op names match functions in the stdlib operator module
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1929 1938
1930 1939
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # empty title/summary sections are dropped rather than leaving blank gaps
    return b'\n\n'.join([s for s in sections if s])
1944 1953
1945 1954
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    desc = ctx.description()

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions would end up with the same commit message after
    # the URI is amended in.  Instead, append the DREV line, or update it in
    # place if it already exists.  At worst, commit message or test plan
    # updates on Phabricator aren't propagated back to the repository, but
    # that seems reasonable when local commits are effectively combined in
    # Phabricator.
    if _differentialrevisiondescre.search(desc) is None:
        return b'\n\n'.join([desc, uri])

    return _differentialrevisiondescre.sub(uri, desc)
1971 1980
1972 1981
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}
    if len(localcommits) > 1:
        return {bin(node) for node in localcommits.keys()}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1986 1995
1987 1996
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # local:commits times are bare epoch seconds; fake a zero
                # timezone offset
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]

    # fill any still-missing fields from the top level of the diff object
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2055 2064
2056 2065
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if len(specs) == 0:
        raise error.Abort(_(b"empty DREVSPEC set"))

    def _wrap(s):
        # each spec is parenthesized; --stack prefixes the ancestors operator
        if stack:
            return b'(:(%s))' % s
        return b'(%s)' % s

    spec = b'+'.join(pycompat.maplist(_wrap, specs))
    drevs = querydrev(ui, spec)
    if not drevs:
        raise error.Abort(_(b"empty DREVSPEC set"))
    return drevs
2076 2085
2077 2086
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """

    def _latestdiffid(drev):
        # a drev lists every diff ever posted to it; the largest id is current
        return max(int(v) for v in drev[b'diffs'])

    # Prefetch hg:meta property for all diffs
    diffids = sorted({_latestdiffid(drev) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = _latestdiffid(drev)
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))
        header = b''.join(headerlines)

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Write patches to the supplied callback
    write(patches)
2114 2123
2115 2124
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    byteopts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, byteopts.get(b'stack'), specs)

    def _printpatches(patches):
        # emit the raw patch text for every (drev, content) pair
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _printpatches)
2152 2161
2153 2162
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Import starts at the working directory parent(s); each imported
        # changeset becomes the parent of the next one, building the stack.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # NOTE(review): drevs are queried above with ``ui`` but the patches are
    # read with ``repo.ui`` — presumably equivalent here; confirm.
    readpatch(repo.ui, drevs, _write)
2213 2222
2214 2223
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastindex = len(drevs) - 1
    for index, drev in enumerate(drevs):
        # the comment, if any, only goes on the last revision of the set
        if index == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2270 2279
2271 2280
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # no DREV line in the description; fall back to local tags
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict(
            {
                b'url': url + tag,
                b'id': tag,
            }
        )
    return None
2302 2311
2303 2312
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    # the query may return unrelated results; keep only the requested drev
    matched = [drev for drev in drevs if int(drev[b'id']) == drevid]
    if not matched:
        return None
    drev = matched[0]
    return templateutil.hybriddict(
        {
            b'url': drev[b'uri'],
            b'status': drev[b'statusName'],
        }
    )
2326 2335
2327 2336
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # several local revisions may map to the same drev, so keep a
            # set of revs per drev id
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # fetch the status of all mapped drevs in a single conduit round-trip
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # hook invoked by the displayer for each changeset row; writes the
        # drev URI and its colorized status name
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # revisions with no associated drev cannot be annotated; drop them
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now