##// END OF EJS Templates
phabricator: adapt to the new `urlutil.url()` API...
Matt Harbison -
r47830:067f2c53 5.8 stable
parent child Browse files
Show More
@@ -1,2401 +1,2401 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changesets from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 # retry failed command N time (default 0). Useful when using the extension
39 39 # over a flaky connection.
40 40 #
41 41 # We wait `retry.interval` between each retry, in seconds.
42 42 # (default 1 second).
43 43 retry = 3
44 44 retry.interval = 10
45 45
46 46 # the retry option can combine well with the http.timeout one.
47 47 #
48 48 # For example to give up on http request after 20 seconds:
49 49 [http]
50 50 timeout=20
51 51
52 52 [auth]
53 53 example.schemes = https
54 54 example.prefix = phab.example.com
55 55
56 56 # API token. Get it from https://$HOST/conduit/login/
57 57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
58 58 """
59 59
60 60 from __future__ import absolute_import
61 61
62 62 import base64
63 63 import contextlib
64 64 import hashlib
65 65 import itertools
66 66 import json
67 67 import mimetypes
68 68 import operator
69 69 import re
70 70 import time
71 71
72 72 from mercurial.node import bin, nullid, short
73 73 from mercurial.i18n import _
74 74 from mercurial.pycompat import getattr
75 75 from mercurial.thirdparty import attr
76 76 from mercurial import (
77 77 cmdutil,
78 78 context,
79 79 copies,
80 80 encoding,
81 81 error,
82 82 exthelper,
83 83 graphmod,
84 84 httpconnection as httpconnectionmod,
85 85 localrepo,
86 86 logcmdutil,
87 87 match,
88 88 mdiff,
89 89 obsutil,
90 90 parser,
91 91 patch,
92 92 phases,
93 93 pycompat,
94 94 rewriteutil,
95 95 scmutil,
96 96 smartset,
97 97 tags,
98 98 templatefilters,
99 99 templateutil,
100 100 url as urlmod,
101 101 util,
102 102 )
103 103 from mercurial.utils import (
104 104 procutil,
105 105 stringutil,
106 106 urlutil,
107 107 )
108 108 from . import show
109 109
110 110
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Commands, config items and template keywords are registered through this
# helper; the aliases below expose them under the names the extension loader
# looks for.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator',
    b'batchsize',
    default=12,
)
eh.configitem(
    b'phabricator',
    b'callsign',
    default=None,
)
eh.configitem(
    b'phabricator',
    b'curlcmd',
    default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator',
    b'debug',
    default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator',
    b'repophid',
    default=None,
)
eh.configitem(
    b'phabricator',
    b'retry',
    default=0,
)
eh.configitem(
    b'phabricator',
    b'retry.interval',
    default=1,
)
eh.configitem(
    b'phabricator',
    b'url',
    default=None,
)
eh.configitem(
    b'phabsend',
    b'confirm',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'secret',
    default=False,
)
eh.configitem(
    b'phabimport',
    b'obsolete',
    default=False,
)

# Color labels used when printing Differential actions and statuses.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Hidden flag appended to every command registered via vcrcommand(); lets the
# test suite record/replay HTTP traffic instead of hitting a live server.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
209 209
210 210
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Maps ``repository.callsign`` and ``phabricator.uri`` from the working
    directory's ``.arcconfig`` (JSON) onto the ``phabricator.callsign`` /
    ``phabricator.url`` config items, then chains to the wrapped
    ``loadhgrc``.  Returns True when either source provided config.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig present; nothing to load
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return (
        orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or result
    )  # Load .hg/hgrc
248 248
249 249
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Like ``@command``, but adds the hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is given, HTTP requests made by the command are
    recorded to (or, if PATH already exists, replayed from) the vcr cassette
    at PATH, so tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom request matcher: compare URI, method and decoded form body.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact the real API token before it is written to the cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies would leak server session state into the recording.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if vcr:
                cassette = pycompat.fsdecode(vcr)
                import hgdemandimport

                # vcr performs dynamic imports that conflict with
                # Mercurial's demand importer
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
329 329
330 330
331 331 def _debug(ui, *msg, **opts):
332 332 """write debug output for Phabricator if ``phabricator.debug`` is set
333 333
334 334 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
335 335 printed with the --debug argument.
336 336 """
337 337 if ui.configbool(b"phabricator", b"debug"):
338 338 flag = ui.debugflag
339 339 try:
340 340 ui.debugflag = True
341 341 ui.write(*msg, **opts)
342 342 finally:
343 343 ui.debugflag = flag
344 344
345 345
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _walk(key, value):
        if isinstance(value, bool):
            # PHP form encoding spells booleans out as strings
            value = b'true' if value else b'false'
        # dispatch on the exact type: subclasses of list/dict are treated
        # as scalars, matching the original type()-keyed lookup
        kind = type(value)
        if kind is list:
            children = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            children = list(value.items())
        else:
            flat[key] = value
            return
        for subkey, subval in children:
            _walk(b'%s[%s]' % (key, subkey) if key else subkey, subval)

    _walk(b'', params)
    return urlutil.urlreq.urlencode(flat)
371 371
372 372
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # look the URL up in the [auth] section to find the matching token
    res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
401 401
402 402
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``) and is
    appended to the configured ``phabricator.url``; ``params`` is sent as a
    PHP-style nested form body.  Transient HTTP failures are retried per the
    ``phabricator.retry`` / ``phabricator.retry.interval`` config items.
    Raises ``error.Abort`` when Conduit reports an error code.
    """
    host, token = readurltoken(ui)
    # NOTE: uses the urlutil.url() API introduced for 5.8; the former
    # util.url() spelling is the pre-5.8 location of the same helper.
    url, authinfo = urlutil.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # user-supplied curl invocation; the body is piped on stdin
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        max_try = ui.configint(b'phabricator', b'retry') + 1
        timeout = ui.configwith(float, b'http', b'timeout')
        for try_count in range(max_try):
            try:
                with contextlib.closing(
                    urlopener.open(request, timeout=timeout)
                ) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                # re-raise on the last attempt, otherwise log and retry
                if try_count == max_try - 1:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (try_count + 1, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
463 463
464 464
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # pretty-print deterministically so output is stable for tests
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
488 488
489 489
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # remember the answer so later calls skip the Conduit round-trip
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
509 509
510 510
# Matches a whole local tag name like "D123" marking a submitted revision.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>D<id>" line appended to commit
# messages on submission; groups: "url" (full link), "id" (revision number).
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
515 515
516 516
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # no predecessor carried a D-tag: check commit message instead
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                # the D-tag was bogus: drop it locally and warn
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
642 642
643 643
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevof(ctx):
        # The commit message is checked first, then local "D123" tags.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: _drevof(repo[rev]) for rev in revs}
665 665
666 666
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # concatenate the raw chunks, discarding the display labels
    return b''.join(
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
        )
    )
675 675
676 676
class DiffChangeType(object):
    """Numeric change kinds sent in the ``type`` field of a phabchange.

    NOTE(review): values appear to mirror Phabricator's
    DifferentialChangeType constants — confirm against the Phabricator
    source before renumbering.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
686 686
687 687
class DiffFileType(object):
    """Numeric file kinds sent in the ``fileType`` field of a phabchange.

    NOTE(review): values appear to mirror Phabricator's DifferentialDiff
    file-type constants — confirm against the Phabricator source.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
692 692
693 693
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Field names are camelCase because they are serialized verbatim into the
    Conduit payload.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # the hunk body (diff lines, without the @@ header)
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
706 706
707 707
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Field names are camelCase because they are serialized verbatim into the
    Conduit payload.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode string (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode string."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its add/del counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
747 747
748 748
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Field names are camelCase because they are serialized verbatim into the
    Conduit payload.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for this diff, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
775 775
776 776
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``basectx.p1()`` and ``ctx`` in git mode and
    converts each resulting hunk into a phabhunk on ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # context=32767 effectively requests unlimited context around each change
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the @@ header; the corpus is the hunk body only
        corpus = b''.join(lines[1:])
        # rebuild a complete hunk (file header + lines) so diffstat can
        # compute the add/delete counts
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
805 805
806 806
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # skip pieces the server already holds
            if chunk[b'complete']:
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            payload = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(fctx.data()[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', payload)
832 832
833 833
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # a PHID from allocate means the server wants chunked upload
            uploadchunks(fctx, fphid)
        else:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
869 869
870 870
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content unchanged: if it's left as IMAGE/BINARY the web UI might
        # try to display it, so flip back to TEXT and mirror the metadata.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
894 894
895 895
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        guessed = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = guessed
        # images get their own type so the web UI can render them
        if guessed.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
908 908
909 909
# Copied from mercurial/patch.py
# Maps a file flag (b'l' symlink, b'x' executable, b'' regular) to the
# git-style octal mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
912 912
913 913
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # warn so the user knows why the file is treated as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
927 927
928 928
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # only text files get hunk content; binary/non-UTF-8 are left bare
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)
        pdiff.addchange(pchange)
941 941
942 942
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for path in modified:
        fctx = ctx[path]
        oldfctx = basectx.p1()[path]
        change = phabchange(currentPath=path, oldPath=path)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            change.addoldmode(oldmode)
            change.addnewmode(newmode)

        # notutf8() has the side effect of printing a notice, so keep the
        # short-circuit evaluation order of the checks.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(change, fctx)
            addoldbinary(change, oldfctx, fctx)
        else:
            maketext(change, basectx, ctx, path)

        pdiff.addchange(change)
967 967
968 968
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    ``removed`` is mutated: files recorded as moves are removed from it so the
    later addremoved() pass does not also emit a DELETE for them.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # Folded range: compute copies across the whole span at once.
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the source name for a copy/rename; falls back to fname
        # itself, which signals "not renamed" below.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Already moved elsewhere; an extra destination makes it a
                # multi-copy.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # notutf8() prints a notice as a side effect; evaluation order matters.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the source-side records after all destinations are known.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1050 1050
1051 1051
def creatediff(basectx, ctx):
    """create a Differential Diff

    Posts the diff covering basectx.p1()::ctx via the
    "differential.creatediff" conduit API and returns the resulting dict.
    Aborts if the server does not return a diff.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    if basectx != ctx:
        msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
    else:
        msg = _(b'cannot create diff for %s') % ctx
    raise error.Abort(msg)
1081 1081
1082 1082
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the diff as a whole (tip commit + base parent).
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" records every local commit covered by the diff.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1126 1126
1127 1127
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit response dicts, or aborts.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of old and new ranges to decide whether
        # the patch content actually changed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1246 1246
1247 1247
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    data = result[b'data']
    # The API does not treat an unknown username as an error, so detect any
    # names that came back unresolved ourselves.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1263 1263
1264 1264
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1282 1282
1283 1283
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = stringutil.pprint([short(n) for n in newnodes])
    _debug(unfi.ui, b"new commits: %s\n" % shortnodes)

    try:
        writediffproperties([unfi[newnode] for newnode in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1301 1301
1302 1302
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
        (b'', b'fold', False, _(b'combine the revisions into one review')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range. The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, revtype):
        # Abort with a summary of the offending revisions.
        badnodes = [repo[r].node() for r in revs]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option. (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.) It should
        # lock the repo and ensure that the range is editable, but that would
        # make the code pretty convoluted. The default behavior of `arc` is to
        # create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value, and
        # iterate over those groups when posting, and then again when amending.
        # But for simplicity, require all selected revisions to be for the same
        # DREV (if present). Adding local revisions to an existing DREV is
        # acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line adds to (never overrides off) the
    # [phabsend] confirm config knob.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            # With --fold a single diff covers every commit in the range.
            if fold:
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # With --fold everything was posted in one go; report a status
            # line per commit and stop iterating.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits. The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contain "Differential Revision"
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    newdesc = rewriteutil.update_hash_refs(
                        repo,
                        newdesc,
                        mapping,
                    )
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected. No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs. So use the queried diff (if
                # present), or force the amend (a new revision is being posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process. The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=rewriteutil.update_hash_refs(
                        repo, old.description(), mapping
                    ),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1683 1683
1684 1684
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: the trailing space in b'Parent ' appears intentional (matches the
# export header spacing) — do not strip it.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1696 1696
1697 1697
def _confirmbeforesend(repo, revs, oldmap):
    """list the changesets to be sent and prompt the user for confirmation

    Returns True if the user accepts, False if they decline.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (drevdesc, summary))

    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    # promptchoice returns 0 for "Yes", so invert it into a boolean.
    return not ui.promptchoice(prompt)
1723 1723
1724 1724
# Normalized (lowercased, space-stripped) Differential Revision status names
# recognized by this extension; compare with the output of _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1733 1733
1734 1734
1735 1735 def _getstatusname(drev):
1736 1736 """get normalized status name from a Differential Revision"""
1737 1737 return drev[b'statusName'].replace(b' ', b'').lower()
1738 1738
1739 1739
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# The table below drives the generic Mercurial expression parser
# (parser.parser); see _parse() for where it is consumed.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1754 1754
1755 1755
def _tokenize(text):
    """generate (token-type, value, position) tuples for the drev language"""
    view = memoryview(text)  # zero-copy slicing of the input
    special = b'():+-& '
    pos = 0
    end = len(text)
    while pos < end:
        # Greedily consume a run of non-special bytes as a single symbol.
        run = itertools.takewhile(
            lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
        )
        symbol = b''.join(run)
        if not symbol:
            # A special character; spaces are skipped, the rest are emitted.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
        else:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
    yield (b'end', None, pos)
1775 1775
1776 1776
def _parse(text):
    """parse drev-language ``text`` into a tree, rejecting trailing garbage"""
    result, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return result
1782 1782
1783 1783
1784 1784 def _parsedrev(symbol):
1785 1785 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1786 1786 if symbol.startswith(b'D') and symbol[1:].isdigit():
1787 1787 return int(symbol[1:])
1788 1788 if symbol.isdigit():
1789 1789 return int(symbol)
1790 1790
1791 1791
1792 1792 def _prefetchdrevs(tree):
1793 1793 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1794 1794 drevs = set()
1795 1795 ancestordrevs = set()
1796 1796 op = tree[0]
1797 1797 if op == b'symbol':
1798 1798 r = _parsedrev(tree[1])
1799 1799 if r:
1800 1800 drevs.add(r)
1801 1801 elif op == b'ancestors':
1802 1802 r, a = _prefetchdrevs(tree[1])
1803 1803 drevs.update(r)
1804 1804 ancestordrevs.update(r)
1805 1805 ancestordrevs.update(a)
1806 1806 else:
1807 1807 for t in tree[1:]:
1808 1808 r, a = _prefetchdrevs(t)
1809 1809 drevs.update(r)
1810 1810 ancestordrevs.update(a)
1811 1811 return drevs, ancestordrevs
1812 1812
1813 1813
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None

        Serves from the ``prefetched`` cache when possible; otherwise issues
        a conduit query and caches every returned drev under both its PHID
        and its integer id, so later lookups by either key form hit.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        # The query may legitimately return other drevs without the one
        # asked for (e.g. it does not exist on the server): abort then.
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Walks the "phabricator:depends-on" edges breadth-wise from each top,
        deduplicating with ``visited``, then reverses so the result is
        ordered bottom (oldest dependency) to top.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor query
    # root, speculatively fetch the ``batchsize`` ids below it too, since
    # stacks are usually posted with consecutive ids.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    # All ids reachable by the query: full stacks of the ancestor roots,
    # plus the individually named revisions.
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the prefetched ids; they cannot
                # stand alone (validids limits the candidate set).
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set operations map directly onto smartset operators.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1939 1939
1940 1940
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a blank summary) are dropped before joining.
    return b'\n\n'.join(p for p in parts if p)
1954 1954
1955 1955
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    description = ctx.description()

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions would end up with the same commit message after
    # the URI is amended in.  Only append the DREV line, or update it in
    # place if it already exists.  At worst, commit message or test plan
    # updates on Phabricator aren't propagated back to the repository, but
    # that seems reasonable when local commits were combined in Phabricator.
    if _differentialrevisiondescre.search(description):
        return _differentialrevisiondescre.sub(uri, description)
    return b'\n\n'.join([description, uri])
1981 1981
1982 1982
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    local = properties.get(b'local:commits') or {}
    if len(local) > 1:
        # A folded review: every recorded local commit belongs to it.
        return {bin(hexnode) for hexnode in local}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    node = getdiffmeta(diff).get(b'node', b'')
    return {bin(node) or None}
1996 1996
1997 1997
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc records seconds only, so the zone offset is lost
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]
    # Fill anything still missing from the top-level diff fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2065 2065
2066 2066
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:
        # Parenthesize each spec so the '+' union cannot rebind operators
        # inside it; ':' prefixes a spec when the whole stack is wanted.
        formatted = []
        for s in specs:
            if stack:
                s = b':(%s)' % s
            formatted.append(b'(%s)' % s)

        drevs = querydrev(ui, b'+'.join(formatted))
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
2086 2086
2087 2087
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """

    def latestdiffid(drev):
        # a drev can carry several diffs; the highest id is the newest
        return max(int(v) for v in drev[b'diffs'])

    # Prefetch hg:meta property for all diffs in a single conduit call
    diffids = sorted({latestdiffid(drev) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate a patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = latestdiffid(drev)
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property.  Write hg
        # patch headers that can be read by the "import" command.  See
        # patchheadermap and extract in mercurial/patch.py for supported
        # headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, body)
        patches.append((drev[b'id'], content))

    # Hand the assembled patches to the supplied callback
    write(patches)
2124 2124
2125 2125
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    byteopts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, byteopts.get(b'stack'), specs)

    def _write(patches):
        # Concatenate every patch body onto stdout.
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
2162 2162
2163 2163
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    # Callback handed to readpatch(); receives [(drev-id, patch-bytes)] and
    # commits each patch.  Runs inside one transaction so a failure midway
    # rolls back the whole stack instead of leaving it half-imported.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain the next patch onto the commit just created so a
                    # stack imports as a linear series.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2223 2223
2224 2224
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
        (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
    ],
    _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    # Only one status-changing flag may be active at a time.
    chosen = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(chosen) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(chosen))

    actions = [{b'type': name, b'value': True} for name in chosen]

    revs = opts.get(b'rev')
    if revs:
        if not repo:
            raise error.InputError(_(b'--rev requires a repository'))

        if specs:
            raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))

        # Translate local revisions into their associated D-numbers.
        drevmap = getdrevmap(repo, scmutil.revrange(repo, [revs]))
        specs = []
        unknown = []
        for rev, drevid in pycompat.iteritems(drevmap):
            if drevid is None:
                unknown.append(repo[rev])
            else:
                specs.append(b'D%d' % drevid)
        if unknown:
            raise error.InputError(
                _(b'selected revisions without a Differential: %s')
                % scmutil.nodesummaries(repo, unknown)
            )

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # A comment, if given, is attached to the last revision only.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
2303 2303
2304 2304
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: the "Differential Revision:" line in the commit
    # message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # Otherwise fall back to a local "D123"-style tag, building the URL
    # from the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        url += tag

        return templateutil.hybriddict(
            {
                b'url': url,
                b'id': tag,
            }
        )
    return None
2335 2335
2336 2336
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # No Differential is associated with this revision.
        return None

    # Query conduit and pick the entry matching our D-number.
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) != drevid:
            continue
        return templateutil.hybriddict(
            {
                b'url': drev[b'uri'],
                b'status': drev[b'statusName'],
            }
        )
    return None
2359 2359
2360 2360
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition local revisions into those with an associated Differential
    # (indexed both ways for the lookups below) and those without.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One conduit round-trip for all Differentials, then map each local
    # revision back to its drev dict.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Hook invoked by the displayer after each changeset: prints the drev
    # URI and its color-labeled status name.
    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revisions that actually have a Differential.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now