phabricator: pass ui instead of repo to `querydrev()`...
Matt Harbison
r44906:df805308 default
@@ -1,1799 +1,1799 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
93 93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 95 # specify the version(s) of Mercurial they are tested with, or
96 96 # leave the attribute unspecified.
97 97 testedwith = b'ships-with-hg-core'
98 98
99 99 eh = exthelper.exthelper()
100 100
101 101 cmdtable = eh.cmdtable
102 102 command = eh.command
103 103 configtable = eh.configtable
104 104 templatekeyword = eh.templatekeyword
105 105 uisetup = eh.finaluisetup
106 106
107 107 # developer config: phabricator.batchsize
108 108 eh.configitem(
109 109 b'phabricator', b'batchsize', default=12,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'callsign', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabricator', b'curlcmd', default=None,
116 116 )
117 117 # developer config: phabricator.repophid
118 118 eh.configitem(
119 119 b'phabricator', b'repophid', default=None,
120 120 )
121 121 eh.configitem(
122 122 b'phabricator', b'url', default=None,
123 123 )
124 124 eh.configitem(
125 125 b'phabsend', b'confirm', default=False,
126 126 )
127 127
128 128 colortable = {
129 129 b'phabricator.action.created': b'green',
130 130 b'phabricator.action.skipped': b'magenta',
131 131 b'phabricator.action.updated': b'magenta',
132 132 b'phabricator.desc': b'',
133 133 b'phabricator.drev': b'bold',
134 134 b'phabricator.node': b'',
135 135 b'phabricator.status.abandoned': b'magenta dim',
136 136 b'phabricator.status.accepted': b'green bold',
137 137 b'phabricator.status.closed': b'green',
138 138 b'phabricator.status.needsreview': b'yellow',
139 139 b'phabricator.status.needsrevision': b'red',
140 140 b'phabricator.status.changesplanned': b'red',
141 141 }
142 142
143 143 _VCR_FLAGS = [
144 144 (
145 145 b'',
146 146 b'test-vcr',
147 147 b'',
148 148 _(
149 149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 150 b', otherwise will mock all http requests using the specified vcr file.'
151 151 b' (ADVANCED)'
152 152 ),
153 153 ),
154 154 ]
155 155
156 156
157 157 @eh.wrapfunction(localrepo, "loadhgrc")
158 158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 160 """
161 161 result = False
162 162 arcconfig = {}
163 163
164 164 try:
165 165 # json.loads only accepts bytes from 3.6+
166 166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 167 # json.loads only returns unicode strings
168 168 arcconfig = pycompat.rapply(
169 169 lambda x: encoding.unitolocal(x)
170 170 if isinstance(x, pycompat.unicode)
171 171 else x,
172 172 pycompat.json_loads(rawparams),
173 173 )
174 174
175 175 result = True
176 176 except ValueError:
177 177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 178 except IOError:
179 179 pass
180 180
181 181 cfg = util.sortdict()
182 182
183 183 if b"repository.callsign" in arcconfig:
184 184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
185 185
186 186 if b"phabricator.uri" in arcconfig:
187 187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188 188
189 189 if cfg:
190 190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
191 191
192 192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
193 193
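# Editorial sketch, not part of the upstream module: an example of the
# ``.arcconfig`` content the wrapper above can pick up. The callsign and URI
# values are made-up placeholders.
_EXAMPLE_ARCCONFIG = b"""
{
  "repository.callsign": "FOO",
  "phabricator.uri": "https://phab.example.com/"
}
"""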
194 194
195 195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
196 196 fullflags = flags + _VCR_FLAGS
197 197
198 198 def hgmatcher(r1, r2):
199 199 if r1.uri != r2.uri or r1.method != r2.method:
200 200 return False
201 201 r1params = util.urlreq.parseqs(r1.body)
202 202 r2params = util.urlreq.parseqs(r2.body)
203 203 for key in r1params:
204 204 if key not in r2params:
205 205 return False
206 206 value = r1params[key][0]
207 207 # we want to compare json payloads without worrying about ordering
208 208 if value.startswith(b'{') and value.endswith(b'}'):
209 209 r1json = pycompat.json_loads(value)
210 210 r2json = pycompat.json_loads(r2params[key][0])
211 211 if r1json != r2json:
212 212 return False
213 213 elif r2params[key][0] != value:
214 214 return False
215 215 return True
216 216
217 217 def sanitiserequest(request):
218 218 request.body = re.sub(
219 219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
220 220 )
221 221 return request
222 222
223 223 def sanitiseresponse(response):
224 224 if 'set-cookie' in response['headers']:
225 225 del response['headers']['set-cookie']
226 226 return response
227 227
228 228 def decorate(fn):
229 229 def inner(*args, **kwargs):
230 230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
231 231 if cassette:
232 232 import hgdemandimport
233 233
234 234 with hgdemandimport.deactivated():
235 235 import vcr as vcrmod
236 236 import vcr.stubs as stubs
237 237
238 238 vcr = vcrmod.VCR(
239 239 serializer='json',
240 240 before_record_request=sanitiserequest,
241 241 before_record_response=sanitiseresponse,
242 242 custom_patches=[
243 243 (
244 244 urlmod,
245 245 'httpconnection',
246 246 stubs.VCRHTTPConnection,
247 247 ),
248 248 (
249 249 urlmod,
250 250 'httpsconnection',
251 251 stubs.VCRHTTPSConnection,
252 252 ),
253 253 ],
254 254 )
255 255 vcr.register_matcher('hgmatcher', hgmatcher)
256 256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
257 257 return fn(*args, **kwargs)
258 258 return fn(*args, **kwargs)
259 259
260 260 inner.__name__ = fn.__name__
261 261 inner.__doc__ = fn.__doc__
262 262 return command(
263 263 name,
264 264 fullflags,
265 265 spec,
266 266 helpcategory=helpcategory,
267 267 optionalrepo=optionalrepo,
268 268 )(inner)
269 269
270 270 return decorate
271 271
272 272
273 273 def urlencodenested(params):
274 274 """like urlencode, but works with nested parameters.
275 275
276 276 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
277 277 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
278 278 urlencode. Note: the encoding is consistent with PHP's http_build_query.
279 279 """
280 280 flatparams = util.sortdict()
281 281
282 282 def process(prefix, obj):
283 283 if isinstance(obj, bool):
284 284 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
285 285 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
286 286 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
287 287 if items is None:
288 288 flatparams[prefix] = obj
289 289 else:
290 290 for k, v in items(obj):
291 291 if prefix:
292 292 process(b'%s[%s]' % (prefix, k), v)
293 293 else:
294 294 process(k, v)
295 295
296 296 process(b'', params)
297 297 return util.urlreq.urlencode(flatparams)
298 298
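# Editorial sketch, not part of the upstream module: a tiny demonstration of
# how urlencodenested() flattens nested parameters, using made-up values.
def _demo_urlencodenested():
    # {'a': ['b', 'c'], 'd': {'e': 'f'}} flattens to a[0]=b, a[1]=c, d[e]=f
    # (percent-encoded), matching PHP's http_build_query as described above.
    return urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})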
299 299
300 300 def readurltoken(ui):
301 301 """return conduit url, token and make sure they exist
302 302
303 303 Currently read from [auth] config section. In the future, it might
304 304 make sense to read from .arcconfig and .arcrc as well.
305 305 """
306 306 url = ui.config(b'phabricator', b'url')
307 307 if not url:
308 308 raise error.Abort(
309 309 _(b'config %s.%s is required') % (b'phabricator', b'url')
310 310 )
311 311
312 312 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
313 313 token = None
314 314
315 315 if res:
316 316 group, auth = res
317 317
318 318 ui.debug(b"using auth.%s.* for authentication\n" % group)
319 319
320 320 token = auth.get(b'phabtoken')
321 321
322 322 if not token:
323 323 raise error.Abort(
324 324 _(b'Can\'t find conduit token associated with %s') % (url,)
325 325 )
326 326
327 327 return url, token
328 328
329 329
330 330 def callconduit(ui, name, params):
331 331 """call Conduit API, params is a dict. return json.loads result, or None"""
332 332 host, token = readurltoken(ui)
333 333 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
334 334 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
335 335 params = params.copy()
336 336 params[b'__conduit__'] = {
337 337 b'token': token,
338 338 }
339 339 rawdata = {
340 340 b'params': templatefilters.json(params),
341 341 b'output': b'json',
342 342 b'__conduit__': 1,
343 343 }
344 344 data = urlencodenested(rawdata)
345 345 curlcmd = ui.config(b'phabricator', b'curlcmd')
346 346 if curlcmd:
347 347 sin, sout = procutil.popen2(
348 348 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
349 349 )
350 350 sin.write(data)
351 351 sin.close()
352 352 body = sout.read()
353 353 else:
354 354 urlopener = urlmod.opener(ui, authinfo)
355 355 request = util.urlreq.request(pycompat.strurl(url), data=data)
356 356 with contextlib.closing(urlopener.open(request)) as rsp:
357 357 body = rsp.read()
358 358 ui.debug(b'Conduit Response: %s\n' % body)
359 359 parsed = pycompat.rapply(
360 360 lambda x: encoding.unitolocal(x)
361 361 if isinstance(x, pycompat.unicode)
362 362 else x,
363 363 # json.loads only accepts bytes from py3.6+
364 364 pycompat.json_loads(encoding.unifromlocal(body)),
365 365 )
366 366 if parsed.get(b'error_code'):
367 367 msg = _(b'Conduit Error (%s): %s') % (
368 368 parsed[b'error_code'],
369 369 parsed[b'error_info'],
370 370 )
371 371 raise error.Abort(msg)
372 372 return parsed[b'result']
373 373
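# Editorial sketch, not part of the upstream module: a minimal, hypothetical
# use of callconduit(). It assumes phabricator.url and an API token are
# configured as described in the module docstring; D123 is a made-up id.
def _demo_callconduit(ui):
    # Query a single Differential Revision by numeric id and return the
    # parsed Conduit result (a list of drev dicts).
    return callconduit(ui, b'differential.query', {b'ids': [123]})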
374 374
375 375 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
376 376 def debugcallconduit(ui, repo, name):
377 377 """call Conduit API
378 378
379 379 Call parameters are read from stdin as a JSON blob. Result will be written
380 380 to stdout as a JSON blob.
381 381 """
382 382 # json.loads only accepts bytes from 3.6+
383 383 rawparams = encoding.unifromlocal(ui.fin.read())
384 384 # json.loads only returns unicode strings
385 385 params = pycompat.rapply(
386 386 lambda x: encoding.unitolocal(x)
387 387 if isinstance(x, pycompat.unicode)
388 388 else x,
389 389 pycompat.json_loads(rawparams),
390 390 )
391 391 # json.dumps only accepts unicode strings
392 392 result = pycompat.rapply(
393 393 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
394 394 callconduit(ui, name, params),
395 395 )
396 396 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
397 397 ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
400 400 def getrepophid(repo):
401 401 """given callsign, return repository PHID or None"""
402 402 # developer config: phabricator.repophid
403 403 repophid = repo.ui.config(b'phabricator', b'repophid')
404 404 if repophid:
405 405 return repophid
406 406 callsign = repo.ui.config(b'phabricator', b'callsign')
407 407 if not callsign:
408 408 return None
409 409 query = callconduit(
410 410 repo.ui,
411 411 b'diffusion.repository.search',
412 412 {b'constraints': {b'callsigns': [callsign]}},
413 413 )
414 414 if len(query[b'data']) == 0:
415 415 return None
416 416 repophid = query[b'data'][0][b'phid']
417 417 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
418 418 return repophid
419 419
420 420
421 421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
422 422 _differentialrevisiondescre = re.compile(
423 423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
424 424 )
425 425
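# Editorial sketch, not part of the upstream module: how the two patterns
# above are matched in practice. The tag and commit message are made-up.
def _demo_drev_patterns():
    tagid = _differentialrevisiontagre.match(b'D1234').group(1)  # b'1234'
    desc = b'fix a bug\n\nDifferential Revision: https://phab.example.com/D1234'
    m = _differentialrevisiondescre.search(desc)
    return int(tagid), int(m.group('id'))  # (1234, 1234)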
426 426
427 427 def getoldnodedrevmap(repo, nodelist):
428 428 """find previous nodes that has been sent to Phabricator
429 429
430 430 return {node: (oldnode, Differential diff, Differential Revision ID)}
431 431 for node in nodelist with known previous sent versions, or associated
432 432 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
433 433 be ``None``.
434 434
435 435 Examines commit messages like "Differential Revision:" to get the
436 436 association information.
437 437
438 438 If such commit message line is not found, examines all precursors and their
439 439 tags. Tags with format like "D1234" are considered a match and the node
440 440 with that tag, and the number after "D" (ex. 1234) will be returned.
441 441
442 442 The ``old node``, if not None, is guaranteed to be the last diff of
443 443 corresponding Differential Revision, and exist in the repo.
444 444 """
445 445 unfi = repo.unfiltered()
446 446 has_node = unfi.changelog.index.has_node
447 447
448 448 result = {} # {node: (oldnode?, lastdiff?, drev)}
449 449 toconfirm = {} # {node: (force, {precnode}, drev)}
450 450 for node in nodelist:
451 451 ctx = unfi[node]
452 452 # For tags like "D123", put them into "toconfirm" to verify later
453 453 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
454 454 for n in precnodes:
455 455 if has_node(n):
456 456 for tag in unfi.nodetags(n):
457 457 m = _differentialrevisiontagre.match(tag)
458 458 if m:
459 459 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
460 460 break
461 461 else:
462 462 continue # move to next predecessor
463 463 break # found a tag, stop
464 464 else:
465 465 # Check commit message
466 466 m = _differentialrevisiondescre.search(ctx.description())
467 467 if m:
468 468 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
469 469
470 470 # Double check that tags are genuine by collecting all old nodes from
471 471 # Phabricator, and expect precursors to overlap with them.
472 472 if toconfirm:
473 473 drevs = [drev for force, precs, drev in toconfirm.values()]
474 474 alldiffs = callconduit(
475 475 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
476 476 )
477 477 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
478 478 for newnode, (force, precset, drev) in toconfirm.items():
479 479 diffs = [
480 480 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
481 481 ]
482 482
483 483 # "precursors" as known by Phabricator
484 484 phprecset = set(getnode(d) for d in diffs)
485 485
486 486 # Ignore if precursors (Phabricator and local repo) do not overlap,
487 487 # and force is not set (when commit message says nothing)
488 488 if not force and not bool(phprecset & precset):
489 489 tagname = b'D%d' % drev
490 490 tags.tag(
491 491 repo,
492 492 tagname,
493 493 nullid,
494 494 message=None,
495 495 user=None,
496 496 date=None,
497 497 local=True,
498 498 )
499 499 unfi.ui.warn(
500 500 _(
501 501 b'D%d: local tag removed - does not match '
502 502 b'Differential history\n'
503 503 )
504 504 % drev
505 505 )
506 506 continue
507 507
508 508 # Find the last node using Phabricator metadata, and make sure it
509 509 # exists in the repo
510 510 oldnode = lastdiff = None
511 511 if diffs:
512 512 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
513 513 oldnode = getnode(lastdiff)
514 514 if oldnode and not has_node(oldnode):
515 515 oldnode = None
516 516
517 517 result[newnode] = (oldnode, lastdiff, drev)
518 518
519 519 return result
520 520
521 521
522 522 def getdrevmap(repo, revs):
523 523 """Return a dict mapping each rev in `revs` to their Differential Revision
524 524 ID or None.
525 525 """
526 526 result = {}
527 527 for rev in revs:
528 528 result[rev] = None
529 529 ctx = repo[rev]
530 530 # Check commit message
531 531 m = _differentialrevisiondescre.search(ctx.description())
532 532 if m:
533 533 result[rev] = int(m.group('id'))
534 534 continue
535 535 # Check tags
536 536 for tag in repo.nodetags(ctx.node()):
537 537 m = _differentialrevisiontagre.match(tag)
538 538 if m:
539 539 result[rev] = int(m.group(1))
540 540 break
541 541
542 542 return result
543 543
544 544
545 545 def getdiff(ctx, diffopts):
546 546 """plain-text diff without header (user, commit message, etc)"""
547 547 output = util.stringio()
548 548 for chunk, _label in patch.diffui(
549 549 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
550 550 ):
551 551 output.write(chunk)
552 552 return output.getvalue()
553 553
554 554
555 555 class DiffChangeType(object):
556 556 ADD = 1
557 557 CHANGE = 2
558 558 DELETE = 3
559 559 MOVE_AWAY = 4
560 560 COPY_AWAY = 5
561 561 MOVE_HERE = 6
562 562 COPY_HERE = 7
563 563 MULTICOPY = 8
564 564
565 565
566 566 class DiffFileType(object):
567 567 TEXT = 1
568 568 IMAGE = 2
569 569 BINARY = 3
570 570
571 571
572 572 @attr.s
573 573 class phabhunk(dict):
574 574 """Represents a Differential hunk, which is owned by a Differential change
575 575 """
576 576
577 577 oldOffset = attr.ib(default=0) # camelcase-required
578 578 oldLength = attr.ib(default=0) # camelcase-required
579 579 newOffset = attr.ib(default=0) # camelcase-required
580 580 newLength = attr.ib(default=0) # camelcase-required
581 581 corpus = attr.ib(default='')
582 582 # These get added to the phabchange's equivalents
583 583 addLines = attr.ib(default=0) # camelcase-required
584 584 delLines = attr.ib(default=0) # camelcase-required
585 585
586 586
587 587 @attr.s
588 588 class phabchange(object):
589 589 """Represents a Differential change, owns Differential hunks and owned by a
590 590 Differential diff. Each one represents one file in a diff.
591 591 """
592 592
593 593 currentPath = attr.ib(default=None) # camelcase-required
594 594 oldPath = attr.ib(default=None) # camelcase-required
595 595 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
596 596 metadata = attr.ib(default=attr.Factory(dict))
597 597 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
598 598 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
599 599 type = attr.ib(default=DiffChangeType.CHANGE)
600 600 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
601 601 commitHash = attr.ib(default=None) # camelcase-required
602 602 addLines = attr.ib(default=0) # camelcase-required
603 603 delLines = attr.ib(default=0) # camelcase-required
604 604 hunks = attr.ib(default=attr.Factory(list))
605 605
606 606 def copynewmetadatatoold(self):
607 607 for key in list(self.metadata.keys()):
608 608 newkey = key.replace(b'new:', b'old:')
609 609 self.metadata[newkey] = self.metadata[key]
610 610
611 611 def addoldmode(self, value):
612 612 self.oldProperties[b'unix:filemode'] = value
613 613
614 614 def addnewmode(self, value):
615 615 self.newProperties[b'unix:filemode'] = value
616 616
617 617 def addhunk(self, hunk):
618 618 if not isinstance(hunk, phabhunk):
619 619 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
620 620 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
621 621 # It's useful to include these stats since the Phab web UI shows them,
622 622 # and uses them to estimate how large a change a Revision is. Also used
623 623 # in email subjects for the [+++--] bit.
624 624 self.addLines += hunk.addLines
625 625 self.delLines += hunk.delLines
626 626
627 627
628 628 @attr.s
629 629 class phabdiff(object):
630 630 """Represents a Differential diff, owns Differential changes. Corresponds
631 631 to a commit.
632 632 """
633 633
634 634 # Doesn't seem to be any reason to send this (output of uname -n)
635 635 sourceMachine = attr.ib(default=b'') # camelcase-required
636 636 sourcePath = attr.ib(default=b'/') # camelcase-required
637 637 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
638 638 sourceControlPath = attr.ib(default=b'/') # camelcase-required
639 639 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
640 640 branch = attr.ib(default=b'default')
641 641 bookmark = attr.ib(default=None)
642 642 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
643 643 lintStatus = attr.ib(default=b'none') # camelcase-required
644 644 unitStatus = attr.ib(default=b'none') # camelcase-required
645 645 changes = attr.ib(default=attr.Factory(dict))
646 646 repositoryPHID = attr.ib(default=None) # camelcase-required
647 647
648 648 def addchange(self, change):
649 649 if not isinstance(change, phabchange):
650 650 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
651 651 self.changes[change.currentPath] = pycompat.byteskwargs(
652 652 attr.asdict(change)
653 653 )
654 654
655 655
656 656 def maketext(pchange, ctx, fname):
657 657 """populate the phabchange for a text file"""
658 658 repo = ctx.repo()
659 659 fmatcher = match.exact([fname])
660 660 diffopts = mdiff.diffopts(git=True, context=32767)
661 661 _pfctx, _fctx, header, fhunks = next(
662 662 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
663 663 )
664 664
665 665 for fhunk in fhunks:
666 666 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
667 667 corpus = b''.join(lines[1:])
668 668 shunk = list(header)
669 669 shunk.extend(lines)
670 670 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
671 671 patch.diffstatdata(util.iterlines(shunk))
672 672 )
673 673 pchange.addhunk(
674 674 phabhunk(
675 675 oldOffset,
676 676 oldLength,
677 677 newOffset,
678 678 newLength,
679 679 corpus,
680 680 addLines,
681 681 delLines,
682 682 )
683 683 )
684 684
685 685
686 686 def uploadchunks(fctx, fphid):
687 687 """upload large binary files as separate chunks.
688 688 Phab requests chunking over 8MiB, and splits into 4MiB chunks
689 689 """
690 690 ui = fctx.repo().ui
691 691 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
692 692 with ui.makeprogress(
693 693 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
694 694 ) as progress:
695 695 for chunk in chunks:
696 696 progress.increment()
697 697 if chunk[b'complete']:
698 698 continue
699 699 bstart = int(chunk[b'byteStart'])
700 700 bend = int(chunk[b'byteEnd'])
701 701 callconduit(
702 702 ui,
703 703 b'file.uploadchunk',
704 704 {
705 705 b'filePHID': fphid,
706 706 b'byteStart': bstart,
707 707 b'data': base64.b64encode(fctx.data()[bstart:bend]),
708 708 b'dataEncoding': b'base64',
709 709 },
710 710 )
711 711
712 712
713 713 def uploadfile(fctx):
714 714 """upload binary files to Phabricator"""
715 715 repo = fctx.repo()
716 716 ui = repo.ui
717 717 fname = fctx.path()
718 718 size = fctx.size()
719 719 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
720 720
721 721 # an allocate call is required first to see if an upload is even required
722 722 # (Phab might already have it) and to determine if chunking is needed
723 723 allocateparams = {
724 724 b'name': fname,
725 725 b'contentLength': size,
726 726 b'contentHash': fhash,
727 727 }
728 728 filealloc = callconduit(ui, b'file.allocate', allocateparams)
729 729 fphid = filealloc[b'filePHID']
730 730
731 731 if filealloc[b'upload']:
732 732 ui.write(_(b'uploading %s\n') % bytes(fctx))
733 733 if not fphid:
734 734 uploadparams = {
735 735 b'name': fname,
736 736 b'data_base64': base64.b64encode(fctx.data()),
737 737 }
738 738 fphid = callconduit(ui, b'file.upload', uploadparams)
739 739 else:
740 740 uploadchunks(fctx, fphid)
741 741 else:
742 742 ui.debug(b'server already has %s\n' % bytes(fctx))
743 743
744 744 if not fphid:
745 745 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
746 746
747 747 return fphid
748 748
749 749
750 750 def addoldbinary(pchange, fctx):
751 751 """add the metadata for the previous version of a binary file to the
752 752 phabchange for the new version
753 753 """
754 754 oldfctx = fctx.p1()
755 755 if fctx.cmp(oldfctx):
756 756 # Files differ, add the old one
757 757 pchange.metadata[b'old:file:size'] = oldfctx.size()
758 758 mimeguess, _enc = mimetypes.guess_type(
759 759 encoding.unifromlocal(oldfctx.path())
760 760 )
761 761 if mimeguess:
762 762 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
763 763 mimeguess
764 764 )
765 765 fphid = uploadfile(oldfctx)
766 766 pchange.metadata[b'old:binary-phid'] = fphid
767 767 else:
768 768 # If it's left as IMAGE/BINARY, the web UI might try to display it
769 769 pchange.fileType = DiffFileType.TEXT
770 770 pchange.copynewmetadatatoold()
771 771
772 772
773 773 def makebinary(pchange, fctx):
774 774 """populate the phabchange for a binary file"""
775 775 pchange.fileType = DiffFileType.BINARY
776 776 fphid = uploadfile(fctx)
777 777 pchange.metadata[b'new:binary-phid'] = fphid
778 778 pchange.metadata[b'new:file:size'] = fctx.size()
779 779 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
780 780 if mimeguess:
781 781 mimeguess = pycompat.bytestr(mimeguess)
782 782 pchange.metadata[b'new:file:mime-type'] = mimeguess
783 783 if mimeguess.startswith(b'image/'):
784 784 pchange.fileType = DiffFileType.IMAGE
785 785
786 786
787 787 # Copied from mercurial/patch.py
788 788 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789 789
790 790
791 791 def notutf8(fctx):
792 792 """detect non-UTF-8 text files since Phabricator requires them to be marked
793 793 as binary
794 794 """
795 795 try:
796 796 fctx.data().decode('utf-8')
797 797 if fctx.parents():
798 798 fctx.p1().data().decode('utf-8')
799 799 return False
800 800 except UnicodeDecodeError:
801 801 fctx.repo().ui.write(
802 802 _(b'file %s detected as non-UTF-8, marked as binary\n')
803 803 % fctx.path()
804 804 )
805 805 return True
806 806
807 807
808 808 def addremoved(pdiff, ctx, removed):
809 809 """add removed files to the phabdiff. Shouldn't include moves"""
810 810 for fname in removed:
811 811 pchange = phabchange(
812 812 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
813 813 )
814 814 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
815 815 fctx = ctx.p1()[fname]
816 816 if not (fctx.isbinary() or notutf8(fctx)):
817 817 maketext(pchange, ctx, fname)
818 818
819 819 pdiff.addchange(pchange)
820 820
821 821
822 822 def addmodified(pdiff, ctx, modified):
823 823 """add modified files to the phabdiff"""
824 824 for fname in modified:
825 825 fctx = ctx[fname]
826 826 pchange = phabchange(currentPath=fname, oldPath=fname)
827 827 filemode = gitmode[ctx[fname].flags()]
828 828 originalmode = gitmode[ctx.p1()[fname].flags()]
829 829 if filemode != originalmode:
830 830 pchange.addoldmode(originalmode)
831 831 pchange.addnewmode(filemode)
832 832
833 833 if fctx.isbinary() or notutf8(fctx):
834 834 makebinary(pchange, fctx)
835 835 addoldbinary(pchange, fctx)
836 836 else:
837 837 maketext(pchange, ctx, fname)
838 838
839 839 pdiff.addchange(pchange)
840 840
841 841
842 842 def addadded(pdiff, ctx, added, removed):
843 843 """add file adds to the phabdiff, both new files and copies/moves"""
844 844 # Keep track of files that have been recorded as moved/copied, so if there are
845 845 # additional copies we can mark them (moves get removed from removed)
846 846 copiedchanges = {}
847 847 movedchanges = {}
848 848 for fname in added:
849 849 fctx = ctx[fname]
850 850 pchange = phabchange(currentPath=fname)
851 851
852 852 filemode = gitmode[ctx[fname].flags()]
853 853 renamed = fctx.renamed()
854 854
855 855 if renamed:
856 856 originalfname = renamed[0]
857 857 originalmode = gitmode[ctx.p1()[originalfname].flags()]
858 858 pchange.oldPath = originalfname
859 859
860 860 if originalfname in removed:
861 861 origpchange = phabchange(
862 862 currentPath=originalfname,
863 863 oldPath=originalfname,
864 864 type=DiffChangeType.MOVE_AWAY,
865 865 awayPaths=[fname],
866 866 )
867 867 movedchanges[originalfname] = origpchange
868 868 removed.remove(originalfname)
869 869 pchange.type = DiffChangeType.MOVE_HERE
870 870 elif originalfname in movedchanges:
871 871 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
872 872 movedchanges[originalfname].awayPaths.append(fname)
873 873 pchange.type = DiffChangeType.COPY_HERE
874 874 else: # pure copy
875 875 if originalfname not in copiedchanges:
876 876 origpchange = phabchange(
877 877 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
878 878 )
879 879 copiedchanges[originalfname] = origpchange
880 880 else:
881 881 origpchange = copiedchanges[originalfname]
882 882 origpchange.awayPaths.append(fname)
883 883 pchange.type = DiffChangeType.COPY_HERE
884 884
885 885 if filemode != originalmode:
886 886 pchange.addoldmode(originalmode)
887 887 pchange.addnewmode(filemode)
888 888 else: # Brand-new file
889 889 pchange.addnewmode(gitmode[fctx.flags()])
890 890 pchange.type = DiffChangeType.ADD
891 891
892 892 if fctx.isbinary() or notutf8(fctx):
893 893 makebinary(pchange, fctx)
894 894 if renamed:
895 895 addoldbinary(pchange, fctx)
896 896 else:
897 897 maketext(pchange, ctx, fname)
898 898
899 899 pdiff.addchange(pchange)
900 900
901 901 for _path, copiedchange in copiedchanges.items():
902 902 pdiff.addchange(copiedchange)
903 903 for _path, movedchange in movedchanges.items():
904 904 pdiff.addchange(movedchange)
905 905
906 906
907 907 def creatediff(ctx):
908 908 """create a Differential Diff"""
909 909 repo = ctx.repo()
910 910 repophid = getrepophid(repo)
911 911 # Create a "Differential Diff" via "differential.creatediff" API
912 912 pdiff = phabdiff(
913 913 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
914 914 branch=b'%s' % ctx.branch(),
915 915 )
916 916 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
917 917 # addadded will remove moved files from removed, so addremoved won't get
918 918 # them
919 919 addadded(pdiff, ctx, added, removed)
920 920 addmodified(pdiff, ctx, modified)
921 921 addremoved(pdiff, ctx, removed)
922 922 if repophid:
923 923 pdiff.repositoryPHID = repophid
924 924 diff = callconduit(
925 925 repo.ui,
926 926 b'differential.creatediff',
927 927 pycompat.byteskwargs(attr.asdict(pdiff)),
928 928 )
929 929 if not diff:
930 930 raise error.Abort(_(b'cannot create diff for %s') % ctx)
931 931 return diff
932 932
933 933
934 934 def writediffproperties(ctx, diff):
935 935 """write metadata to diff so patches could be applied losslessly"""
936 936 # creatediff returns with a diffid but query returns with an id
937 937 diffid = diff.get(b'diffid', diff.get(b'id'))
938 938 params = {
939 939 b'diff_id': diffid,
940 940 b'name': b'hg:meta',
941 941 b'data': templatefilters.json(
942 942 {
943 943 b'user': ctx.user(),
944 944 b'date': b'%d %d' % ctx.date(),
945 945 b'branch': ctx.branch(),
946 946 b'node': ctx.hex(),
947 947 b'parent': ctx.p1().hex(),
948 948 }
949 949 ),
950 950 }
951 951 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
952 952
953 953 params = {
954 954 b'diff_id': diffid,
955 955 b'name': b'local:commits',
956 956 b'data': templatefilters.json(
957 957 {
958 958 ctx.hex(): {
959 959 b'author': stringutil.person(ctx.user()),
960 960 b'authorEmail': stringutil.email(ctx.user()),
961 961 b'time': int(ctx.date()[0]),
962 962 b'commit': ctx.hex(),
963 963 b'parents': [ctx.p1().hex()],
964 964 b'branch': ctx.branch(),
965 965 },
966 966 }
967 967 ),
968 968 }
969 969 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
970 970
971 971
972 972 def createdifferentialrevision(
973 973 ctx,
974 974 revid=None,
975 975 parentrevphid=None,
976 976 oldnode=None,
977 977 olddiff=None,
978 978 actions=None,
979 979 comment=None,
980 980 ):
981 981 """create or update a Differential Revision
982 982
983 983 If revid is None, create a new Differential Revision, otherwise update
984 984 revid. If parentrevphid is not None, set it as a dependency.
985 985
986 986 If oldnode is not None, check if the patch content (without commit message
987 987 and metadata) has changed before creating another diff.
988 988
989 989 If actions is not None, they will be appended to the transaction.
990 990 """
991 991 repo = ctx.repo()
992 992 if oldnode:
993 993 diffopts = mdiff.diffopts(git=True, context=32767)
994 994 oldctx = repo.unfiltered()[oldnode]
995 995 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
996 996 else:
997 997 neednewdiff = True
998 998
999 999 transactions = []
1000 1000 if neednewdiff:
1001 1001 diff = creatediff(ctx)
1002 1002 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1003 1003 if comment:
1004 1004 transactions.append({b'type': b'comment', b'value': comment})
1005 1005 else:
1006 1006 # Even if we don't need to upload a new diff because the patch content
1007 1007 # did not change, we might still need to update its metadata so
1008 1008 # pushers can know the correct node metadata.
1009 1009 assert olddiff
1010 1010 diff = olddiff
1011 1011 writediffproperties(ctx, diff)
1012 1012
1013 1013 # Set the parent Revision every time, so commit re-ordering is picked up
1014 1014 if parentrevphid:
1015 1015 transactions.append(
1016 1016 {b'type': b'parents.set', b'value': [parentrevphid]}
1017 1017 )
1018 1018
1019 1019 if actions:
1020 1020 transactions += actions
1021 1021
1022 1022 # Parse commit message and update related fields.
1023 1023 desc = ctx.description()
1024 1024 info = callconduit(
1025 1025 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1026 1026 )
1027 1027 for k, v in info[b'fields'].items():
1028 1028 if k in [b'title', b'summary', b'testPlan']:
1029 1029 transactions.append({b'type': k, b'value': v})
1030 1030
1031 1031 params = {b'transactions': transactions}
1032 1032 if revid is not None:
1033 1033 # Update an existing Differential Revision
1034 1034 params[b'objectIdentifier'] = revid
1035 1035
1036 1036 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1037 1037 if not revision:
1038 1038 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1039 1039
1040 1040 return revision, diff
1041 1041
1042 1042
1043 1043 def userphids(repo, names):
1044 1044 """convert user names to PHIDs"""
1045 1045 names = [name.lower() for name in names]
1046 1046 query = {b'constraints': {b'usernames': names}}
1047 1047 result = callconduit(repo.ui, b'user.search', query)
1048 1048 # A username not being found is not an error of the API, so check if we
1049 1049 # have missed some names here.
1050 1050 data = result[b'data']
1051 1051 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
1052 1052 unresolved = set(names) - resolved
1053 1053 if unresolved:
1054 1054 raise error.Abort(
1055 1055 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1056 1056 )
1057 1057 return [entry[b'phid'] for entry in data]
1058 1058
1059 1059
1060 1060 @vcrcommand(
1061 1061 b'phabsend',
1062 1062 [
1063 1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1064 1064 (b'', b'amend', True, _(b'update commit messages')),
1065 1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1066 1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1067 1067 (
1068 1068 b'm',
1069 1069 b'comment',
1070 1070 b'',
1071 1071 _(b'add a comment to Revisions with new/updated Diffs'),
1072 1072 ),
1073 1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1074 1074 ],
1075 1075 _(b'REV [OPTIONS]'),
1076 1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1077 1077 )
1078 1078 def phabsend(ui, repo, *revs, **opts):
1079 1079 """upload changesets to Phabricator
1080 1080
1081 1081 If multiple revisions are specified, they will be sent as a stack with a
1082 1082 linear dependency relationship, using the order specified by the
1083 1083 revset.
1084 1084
1085 1085 When uploading changesets for the first time, local tags will be created to
1086 1086 maintain the association. After the first time, phabsend will check
1087 1087 obsstore and tag information so it can figure out whether to update an
1088 1088 existing Differential Revision, or create a new one.
1089 1089
1090 1090 If --amend is set, update commit messages so they have the
1091 1091 ``Differential Revision`` URL, and remove the related tags. This is similar
1092 1092 to what arcanist does, and is preferred in author-push workflows. Otherwise,
1093 1093 local tags are used to record the ``Differential Revision`` association.
1094 1094
1095 1095 The --confirm option lets you confirm changesets before sending them. You
1096 1096 can also add the following to your configuration file to make it the default
1097 1097 behaviour::
1098 1098
1099 1099 [phabsend]
1100 1100 confirm = true
1101 1101
1102 1102 phabsend will check obsstore and the above association to decide whether to
1103 1103 update an existing Differential Revision, or create a new one.
1104 1104 """
1105 1105 opts = pycompat.byteskwargs(opts)
1106 1106 revs = list(revs) + opts.get(b'rev', [])
1107 1107 revs = scmutil.revrange(repo, revs)
1108 1108 revs.sort() # ascending order to preserve topological parent/child in phab
1109 1109
1110 1110 if not revs:
1111 1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1112 1112 if opts.get(b'amend'):
1113 1113 cmdutil.checkunfinished(repo)
1114 1114
1115 1115 # {newnode: (oldnode, olddiff, olddrev)}
1116 1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1117 1117
1118 1118 confirm = ui.configbool(b'phabsend', b'confirm')
1119 1119 confirm |= bool(opts.get(b'confirm'))
1120 1120 if confirm:
1121 1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1122 1122 if not confirmed:
1123 1123 raise error.Abort(_(b'phabsend cancelled'))
1124 1124
1125 1125 actions = []
1126 1126 reviewers = opts.get(b'reviewer', [])
1127 1127 blockers = opts.get(b'blocker', [])
1128 1128 phids = []
1129 1129 if reviewers:
1130 1130 phids.extend(userphids(repo, reviewers))
1131 1131 if blockers:
1132 1132 phids.extend(
1133 1133 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1134 1134 )
1135 1135 if phids:
1136 1136 actions.append({b'type': b'reviewers.add', b'value': phids})
1137 1137
1138 1138 drevids = [] # [int]
1139 1139 diffmap = {} # {newnode: diff}
1140 1140
1141 1141 # Send patches one by one so we know their Differential Revision PHIDs and
1142 1142 # can provide dependency relationship
1143 1143 lastrevphid = None
1144 1144 for rev in revs:
1145 1145 ui.debug(b'sending rev %d\n' % rev)
1146 1146 ctx = repo[rev]
1147 1147
1148 1148 # Get Differential Revision ID
1149 1149 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1150 1150 if oldnode != ctx.node() or opts.get(b'amend'):
1151 1151 # Create or update Differential Revision
1152 1152 revision, diff = createdifferentialrevision(
1153 1153 ctx,
1154 1154 revid,
1155 1155 lastrevphid,
1156 1156 oldnode,
1157 1157 olddiff,
1158 1158 actions,
1159 1159 opts.get(b'comment'),
1160 1160 )
1161 1161 diffmap[ctx.node()] = diff
1162 1162 newrevid = int(revision[b'object'][b'id'])
1163 1163 newrevphid = revision[b'object'][b'phid']
1164 1164 if revid:
1165 1165 action = b'updated'
1166 1166 else:
1167 1167 action = b'created'
1168 1168
1169 1169 # Create a local tag to note the association, if the commit message
1170 1170 # does not have it already
1171 1171 m = _differentialrevisiondescre.search(ctx.description())
1172 1172 if not m or int(m.group('id')) != newrevid:
1173 1173 tagname = b'D%d' % newrevid
1174 1174 tags.tag(
1175 1175 repo,
1176 1176 tagname,
1177 1177 ctx.node(),
1178 1178 message=None,
1179 1179 user=None,
1180 1180 date=None,
1181 1181 local=True,
1182 1182 )
1183 1183 else:
1184 1184 # Nothing changed. But still set "newrevphid" so the next revision
1185 1185 # could depend on this one and "newrevid" for the summary line.
1186 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1186 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1187 1187 newrevid = revid
1188 1188 action = b'skipped'
1189 1189
1190 1190 actiondesc = ui.label(
1191 1191 {
1192 1192 b'created': _(b'created'),
1193 1193 b'skipped': _(b'skipped'),
1194 1194 b'updated': _(b'updated'),
1195 1195 }[action],
1196 1196 b'phabricator.action.%s' % action,
1197 1197 )
1198 1198 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1199 1199 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1200 1200 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1201 1201 ui.write(
1202 1202 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1203 1203 )
1204 1204 drevids.append(newrevid)
1205 1205 lastrevphid = newrevphid
1206 1206
1207 1207 # Update commit messages and remove tags
1208 1208 if opts.get(b'amend'):
1209 1209 unfi = repo.unfiltered()
1210 1210 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1211 1211 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1212 1212 wnode = unfi[b'.'].node()
1213 1213 mapping = {} # {oldnode: [newnode]}
1214 1214 for i, rev in enumerate(revs):
1215 1215 old = unfi[rev]
1216 1216 drevid = drevids[i]
1217 1217 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1218 1218 newdesc = getdescfromdrev(drev)
1219 1219 # Make sure the commit message contains "Differential Revision"
1220 1220 if old.description() != newdesc:
1221 1221 if old.phase() == phases.public:
1222 1222 ui.warn(
1223 1223 _(b"warning: not updating public commit %s\n")
1224 1224 % scmutil.formatchangeid(old)
1225 1225 )
1226 1226 continue
1227 1227 parents = [
1228 1228 mapping.get(old.p1().node(), (old.p1(),))[0],
1229 1229 mapping.get(old.p2().node(), (old.p2(),))[0],
1230 1230 ]
1231 1231 new = context.metadataonlyctx(
1232 1232 repo,
1233 1233 old,
1234 1234 parents=parents,
1235 1235 text=newdesc,
1236 1236 user=old.user(),
1237 1237 date=old.date(),
1238 1238 extra=old.extra(),
1239 1239 )
1240 1240
1241 1241 newnode = new.commit()
1242 1242
1243 1243 mapping[old.node()] = [newnode]
1244 1244 # Update diff property
1245 1245 # If it fails just warn and keep going, otherwise the DREV
1246 1246 # associations will be lost
1247 1247 try:
1248 1248 writediffproperties(unfi[newnode], diffmap[old.node()])
1249 1249 except util.urlerr.urlerror:
1250 1250 ui.warnnoi18n(
1251 1251 b'Failed to update metadata for D%d\n' % drevid
1252 1252 )
1253 1253 # Remove the local tag since it's no longer necessary
1254 1254 tagname = b'D%d' % drevid
1255 1255 if tagname in repo.tags():
1256 1256 tags.tag(
1257 1257 repo,
1258 1258 tagname,
1259 1259 nullid,
1260 1260 message=None,
1261 1261 user=None,
1262 1262 date=None,
1263 1263 local=True,
1264 1264 )
1265 1265 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1266 1266 if wnode in mapping:
1267 1267 unfi.setparents(mapping[wnode][0])
1268 1268
1269 1269
1270 1270 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1271 1271 # consistent with "hg export" output.
1272 1272 _metanamemap = util.sortdict(
1273 1273 [
1274 1274 (b'user', b'User'),
1275 1275 (b'date', b'Date'),
1276 1276 (b'branch', b'Branch'),
1277 1277 (b'node', b'Node ID'),
1278 1278 (b'parent', b'Parent '),
1279 1279 ]
1280 1280 )
1281 1281
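# Editorial sketch, not part of the upstream module: how readpatch() below
# turns an "hg:meta" style dict into "hg import" headers via _metanamemap.
# The user and date values are made up.
def _demo_metaheaders():
    meta = {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499571514 25200'}
    header = b'# HG changeset patch\n'
    for k in _metanamemap.keys():
        if k in meta:
            header += b'# %s %s\n' % (_metanamemap[k], meta[k])
    return header  # '# HG changeset patch\n# User ...\n# Date ...\n'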
1282 1282
1283 1283 def _confirmbeforesend(repo, revs, oldmap):
1284 1284 url, token = readurltoken(repo.ui)
1285 1285 ui = repo.ui
1286 1286 for rev in revs:
1287 1287 ctx = repo[rev]
1288 1288 desc = ctx.description().splitlines()[0]
1289 1289 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1290 1290 if drevid:
1291 1291 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1292 1292 else:
1293 1293 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1294 1294
1295 1295 ui.write(
1296 1296 _(b'%s - %s: %s\n')
1297 1297 % (
1298 1298 drevdesc,
1299 1299 ui.label(bytes(ctx), b'phabricator.node'),
1300 1300 ui.label(desc, b'phabricator.desc'),
1301 1301 )
1302 1302 )
1303 1303
1304 1304 if ui.promptchoice(
1305 1305 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1306 1306 ):
1307 1307 return False
1308 1308
1309 1309 return True
1310 1310
1311 1311
1312 1312 _knownstatusnames = {
1313 1313 b'accepted',
1314 1314 b'needsreview',
1315 1315 b'needsrevision',
1316 1316 b'closed',
1317 1317 b'abandoned',
1318 1318 b'changesplanned',
1319 1319 }
1320 1320
1321 1321
1322 1322 def _getstatusname(drev):
1323 1323 """get normalized status name from a Differential Revision"""
1324 1324 return drev[b'statusName'].replace(b' ', b'').lower()
1325 1325
1326 1326
1327 1327 # Small language to specify differential revisions. Supported symbols: (),
1328 1328 # :X, +, -, and &.
1329 1329
1330 1330 _elements = {
1331 1331 # token-type: binding-strength, primary, prefix, infix, suffix
1332 1332 b'(': (12, None, (b'group', 1, b')'), None, None),
1333 1333 b':': (8, None, (b'ancestors', 8), None, None),
1334 1334 b'&': (5, None, None, (b'and_', 5), None),
1335 1335 b'+': (4, None, None, (b'add', 4), None),
1336 1336 b'-': (4, None, None, (b'sub', 4), None),
1337 1337 b')': (0, None, None, None, None),
1338 1338 b'symbol': (0, b'symbol', None, None, None),
1339 1339 b'end': (0, None, None, None, None),
1340 1340 }
1341 1341
1342 1342
1343 1343 def _tokenize(text):
1344 1344 view = memoryview(text) # zero-copy slice
1345 1345 special = b'():+-& '
1346 1346 pos = 0
1347 1347 length = len(text)
1348 1348 while pos < length:
1349 1349 symbol = b''.join(
1350 1350 itertools.takewhile(
1351 1351 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1352 1352 )
1353 1353 )
1354 1354 if symbol:
1355 1355 yield (b'symbol', symbol, pos)
1356 1356 pos += len(symbol)
1357 1357 else: # special char, ignore space
1358 1358 if text[pos : pos + 1] != b' ':
1359 1359 yield (text[pos : pos + 1], None, pos)
1360 1360 pos += 1
1361 1361 yield (b'end', None, pos)
1362 1362
1363 1363
1364 1364 def _parse(text):
1365 1365 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1366 1366 if pos != len(text):
1367 1367 raise error.ParseError(b'invalid token', pos)
1368 1368 return tree
1369 1369
1370 1370
1371 1371 def _parsedrev(symbol):
1372 1372 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1373 1373 if symbol.startswith(b'D') and symbol[1:].isdigit():
1374 1374 return int(symbol[1:])
1375 1375 if symbol.isdigit():
1376 1376 return int(symbol)
1377 1377
1378 1378
1379 1379 def _prefetchdrevs(tree):
1380 1380 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1381 1381 drevs = set()
1382 1382 ancestordrevs = set()
1383 1383 op = tree[0]
1384 1384 if op == b'symbol':
1385 1385 r = _parsedrev(tree[1])
1386 1386 if r:
1387 1387 drevs.add(r)
1388 1388 elif op == b'ancestors':
1389 1389 r, a = _prefetchdrevs(tree[1])
1390 1390 drevs.update(r)
1391 1391 ancestordrevs.update(r)
1392 1392 ancestordrevs.update(a)
1393 1393 else:
1394 1394 for t in tree[1:]:
1395 1395 r, a = _prefetchdrevs(t)
1396 1396 drevs.update(r)
1397 1397 ancestordrevs.update(a)
1398 1398 return drevs, ancestordrevs
1399 1399
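# Editorial sketch, not part of the upstream module: what the small DREVSPEC
# language produces for a made-up query. ":D6+8-(2+D4)" means the stack up to
# D6, plus D8, minus D2 and D4 (see the phabread docstring below).
def _demo_parsespec():
    tree = _parse(b':D6+8-(2+D4)')
    # _prefetchdrevs() reports every drev id mentioned plus which ones need
    # their ancestors fetched: ({2, 4, 6, 8}, {6})
    return _prefetchdrevs(tree)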
1400 1400
1401 def querydrev(repo, spec):
1401 def querydrev(ui, spec):
1402 1402 """return a list of "Differential Revision" dicts
1403 1403
1404 1404 spec is a string using a simple query language; see the docstring in
1405 1405 phabread for details.
1406 1406
1407 1407 A "Differential Revision dict" looks like:
1408 1408
1409 1409 {
1410 1410 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1411 1411 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1412 1412 "auxiliary": {
1413 1413 "phabricator:depends-on": [
1414 1414 "PHID-DREV-gbapp366kutjebt7agcd"
1415 1415 ]
1416 1416 "phabricator:projects": [],
1417 1417 },
1418 1418 "branch": "default",
1419 1419 "ccs": [],
1420 1420 "commits": [],
1421 1421 "dateCreated": "1499181406",
1422 1422 "dateModified": "1499182103",
1423 1423 "diffs": [
1424 1424 "3",
1425 1425 "4",
1426 1426 ],
1427 1427 "hashes": [],
1428 1428 "id": "2",
1429 1429 "lineCount": "2",
1430 1430 "phid": "PHID-DREV-672qvysjcczopag46qty",
1431 1431 "properties": {},
1432 1432 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1433 1433 "reviewers": [],
1434 1434 "sourcePath": null
1435 1435 "status": "0",
1436 1436 "statusName": "Needs Review",
1437 1437 "summary": "",
1438 1438 "testPlan": "",
1439 1439 "title": "example",
1440 1440 "uri": "https://phab.example.com/D2",
1441 1441 }
1442 1442 """
1443 1443 # TODO: replace differential.query and differential.querydiffs with
1444 1444 # differential.diff.search because the former (and their output) are
1445 1445 # frozen, and planned to be deprecated and removed.
1446 1446
1447 1447 def fetch(params):
1448 1448 """params -> single drev or None"""
1449 1449 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1450 1450 if key in prefetched:
1451 1451 return prefetched[key]
1452 drevs = callconduit(repo.ui, b'differential.query', params)
1452 drevs = callconduit(ui, b'differential.query', params)
1453 1453 # Fill prefetched with the result
1454 1454 for drev in drevs:
1455 1455 prefetched[drev[b'phid']] = drev
1456 1456 prefetched[int(drev[b'id'])] = drev
1457 1457 if key not in prefetched:
1458 1458 raise error.Abort(
1459 1459 _(b'cannot get Differential Revision %r') % params
1460 1460 )
1461 1461 return prefetched[key]
1462 1462
1463 1463 def getstack(topdrevids):
1464 1464 """given a top, get a stack from the bottom, [id] -> [id]"""
1465 1465 visited = set()
1466 1466 result = []
1467 1467 queue = [{b'ids': [i]} for i in topdrevids]
1468 1468 while queue:
1469 1469 params = queue.pop()
1470 1470 drev = fetch(params)
1471 1471 if drev[b'id'] in visited:
1472 1472 continue
1473 1473 visited.add(drev[b'id'])
1474 1474 result.append(int(drev[b'id']))
1475 1475 auxiliary = drev.get(b'auxiliary', {})
1476 1476 depends = auxiliary.get(b'phabricator:depends-on', [])
1477 1477 for phid in depends:
1478 1478 queue.append({b'phids': [phid]})
1479 1479 result.reverse()
1480 1480 return smartset.baseset(result)
1481 1481
1482 1482 # Initialize prefetch cache
1483 1483 prefetched = {} # {id or phid: drev}
1484 1484
1485 1485 tree = _parse(spec)
1486 1486 drevs, ancestordrevs = _prefetchdrevs(tree)
1487 1487
1488 1488 # developer config: phabricator.batchsize
1489 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1489 batchsize = ui.configint(b'phabricator', b'batchsize')
1490 1490
1491 1491 # Prefetch Differential Revisions in batch
1492 1492 tofetch = set(drevs)
1493 1493 for r in ancestordrevs:
1494 1494 tofetch.update(range(max(1, r - batchsize), r + 1))
1495 1495 if drevs:
1496 1496 fetch({b'ids': list(tofetch)})
1497 1497 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1498 1498
1499 1499 # Walk through the tree, return smartsets
1500 1500 def walk(tree):
1501 1501 op = tree[0]
1502 1502 if op == b'symbol':
1503 1503 drev = _parsedrev(tree[1])
1504 1504 if drev:
1505 1505 return smartset.baseset([drev])
1506 1506 elif tree[1] in _knownstatusnames:
1507 1507 drevs = [
1508 1508 r
1509 1509 for r in validids
1510 1510 if _getstatusname(prefetched[r]) == tree[1]
1511 1511 ]
1512 1512 return smartset.baseset(drevs)
1513 1513 else:
1514 1514 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1515 1515 elif op in {b'and_', b'add', b'sub'}:
1516 1516 assert len(tree) == 3
1517 1517 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1518 1518 elif op == b'group':
1519 1519 return walk(tree[1])
1520 1520 elif op == b'ancestors':
1521 1521 return getstack(walk(tree[1]))
1522 1522 else:
1523 1523 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1524 1524
1525 1525 return [prefetched[r] for r in walk(tree)]
1526 1526
1527 1527
1528 1528 def getdescfromdrev(drev):
1529 1529 """get description (commit message) from "Differential Revision"
1530 1530
1531 1531 This is similar to the differential.getcommitmessage API, but we only care
1532 1532 about a limited set of fields: title, summary, test plan, and URL.
1533 1533 """
1534 1534 title = drev[b'title']
1535 1535 summary = drev[b'summary'].rstrip()
1536 1536 testplan = drev[b'testPlan'].rstrip()
1537 1537 if testplan:
1538 1538 testplan = b'Test Plan:\n%s' % testplan
1539 1539 uri = b'Differential Revision: %s' % drev[b'uri']
1540 1540 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1541 1541
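# Editorial sketch, not part of the upstream module: the commit message
# getdescfromdrev() assembles for a made-up Differential Revision with an
# empty test plan (empty sections are dropped by the filter above).
def _demo_getdescfromdrev():
    drev = {
        b'title': b'example',
        b'summary': b'a summary',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D2',
    }
    # -> b'example\n\na summary\n\nDifferential Revision: https://phab.example.com/D2'
    return getdescfromdrev(drev)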
1542 1542
1543 1543 def getdiffmeta(diff):
1544 1544 """get commit metadata (date, node, user, p1) from a diff object
1545 1545
1546 1546 The metadata could be "hg:meta", sent by phabsend, like:
1547 1547
1548 1548 "properties": {
1549 1549 "hg:meta": {
1550 1550 "branch": "default",
1551 1551 "date": "1499571514 25200",
1552 1552 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1553 1553 "user": "Foo Bar <foo@example.com>",
1554 1554 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1555 1555 }
1556 1556 }
1557 1557
1558 1558 Or converted from "local:commits", sent by "arc", like:
1559 1559
1560 1560 "properties": {
1561 1561 "local:commits": {
1562 1562 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1563 1563 "author": "Foo Bar",
1564 1564 "authorEmail": "foo@example.com"
1565 1565 "branch": "default",
1566 1566 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1567 1567 "local": "1000",
1568 1568 "message": "...",
1569 1569 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1570 1570 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1571 1571 "summary": "...",
1572 1572 "tag": "",
1573 1573 "time": 1499546314,
1574 1574 }
1575 1575 }
1576 1576 }
1577 1577
1578 1578 Note: metadata extracted from "local:commits" will lose time zone
1579 1579 information.
1580 1580 """
1581 1581 props = diff.get(b'properties') or {}
1582 1582 meta = props.get(b'hg:meta')
1583 1583 if not meta:
1584 1584 if props.get(b'local:commits'):
1585 1585 commit = sorted(props[b'local:commits'].values())[0]
1586 1586 meta = {}
1587 1587 if b'author' in commit and b'authorEmail' in commit:
1588 1588 meta[b'user'] = b'%s <%s>' % (
1589 1589 commit[b'author'],
1590 1590 commit[b'authorEmail'],
1591 1591 )
1592 1592 if b'time' in commit:
1593 1593 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1594 1594 if b'branch' in commit:
1595 1595 meta[b'branch'] = commit[b'branch']
1596 1596 node = commit.get(b'commit', commit.get(b'rev'))
1597 1597 if node:
1598 1598 meta[b'node'] = node
1599 1599 if len(commit.get(b'parents', ())) >= 1:
1600 1600 meta[b'parent'] = commit[b'parents'][0]
1601 1601 else:
1602 1602 meta = {}
1603 1603 if b'date' not in meta and b'dateCreated' in diff:
1604 1604 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1605 1605 if b'branch' not in meta and diff.get(b'branch'):
1606 1606 meta[b'branch'] = diff[b'branch']
1607 1607 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1608 1608 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1609 1609 return meta
1610 1610
1611 1611
1612 1612 def readpatch(ui, drevs, write):
1613 1613 """generate plain-text patch readable by 'hg import'
1614 1614
1615 1615 write is usually ui.write. drevs is what "querydrev" returns, results of
1616 1616 "differential.query".
1617 1617 """
1618 1618 # Prefetch hg:meta property for all diffs
1619 1619 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1620 1620 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1621 1621
1622 1622 # Generate patch for each drev
1623 1623 for drev in drevs:
1624 1624 ui.note(_(b'reading D%s\n') % drev[b'id'])
1625 1625
1626 1626 diffid = max(int(v) for v in drev[b'diffs'])
1627 1627 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1628 1628 desc = getdescfromdrev(drev)
1629 1629 header = b'# HG changeset patch\n'
1630 1630
1631 1631 # Try to preserve metadata from hg:meta property. Write hg patch
1632 1632 # headers that can be read by the "import" command. See patchheadermap
1633 1633 # and extract in mercurial/patch.py for supported headers.
1634 1634 meta = getdiffmeta(diffs[b'%d' % diffid])
1635 1635 for k in _metanamemap.keys():
1636 1636 if k in meta:
1637 1637 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1638 1638
1639 1639 content = b'%s%s\n%s' % (header, desc, body)
1640 1640 write(content)
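# The patch written for each drev has roughly this shape (the "# <Name> <value>"
# header lines come from _metanamemap, defined earlier in this file, and use
# the usual hg patch header names such as User, Date, Node ID and Parent):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent 6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev()>
#   <raw diff from differential.getrawdiff>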
1641 1641
1642 1642
1643 1643 @vcrcommand(
1644 1644 b'phabread',
1645 1645 [(b'', b'stack', False, _(b'read dependencies'))],
1646 1646 _(b'DREVSPEC [OPTIONS]'),
1647 1647 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1648 1648 )
1649 1649 def phabread(ui, repo, spec, **opts):
1650 1650 """print patches from Phabricator suitable for importing
1651 1651
1652 1652 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1653 1653 the number ``123``. It could also have common operators like ``+``, ``-``,
1654 1654 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1655 1655 select a stack.
1656 1656
1657 1657 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1658 1658 could be used to filter patches by status. For performance reasons, they only
1659 1659 narrow down revisions selected by the rest of the query and cannot be used alone.
1660 1660
1661 1661 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
1662 1662 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1663 1663 stack up to D9.
1664 1664
1665 1665 If --stack is given, follow dependency information and read all patches.
1666 1666 It is equivalent to the ``:`` operator.
1667 1667 """
1668 1668 opts = pycompat.byteskwargs(opts)
1669 1669 if opts.get(b'stack'):
1670 1670 spec = b':(%s)' % spec
1671 drevs = querydrev(repo, spec)
1671 drevs = querydrev(repo.ui, spec)
1672 1672 readpatch(repo.ui, drevs, ui.write)
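# A sketch of typical invocations (D123 is a hypothetical revision):
#
#   $ hg phabread D123                   # print a single patch
#   $ hg phabread --stack D123           # same as the ':D123' spec
#   $ hg phabread ':D123' | hg import -  # import the whole stack locally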
1673 1673
1674 1674
1675 1675 @vcrcommand(
1676 1676 b'phabupdate',
1677 1677 [
1678 1678 (b'', b'accept', False, _(b'accept revisions')),
1679 1679 (b'', b'reject', False, _(b'reject revisions')),
1680 1680 (b'', b'abandon', False, _(b'abandon revisions')),
1681 1681 (b'', b'reclaim', False, _(b'reclaim revisions')),
1682 1682 (b'm', b'comment', b'', _(b'comment on the last revision')),
1683 1683 ],
1684 1684 _(b'DREVSPEC [OPTIONS]'),
1685 1685 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1686 1686 )
1687 1687 def phabupdate(ui, repo, spec, **opts):
1688 1688 """update Differential Revision in batch
1689 1689
1690 1690 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1691 1691 """
1692 1692 opts = pycompat.byteskwargs(opts)
1693 1693 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1694 1694 if len(flags) > 1:
1695 1695 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1696 1696
1697 1697 actions = []
1698 1698 for f in flags:
1699 1699 actions.append({b'type': f, b'value': True})
1700 1700
1701 drevs = querydrev(repo, spec)
1701 drevs = querydrev(repo.ui, spec)
1702 1702 for i, drev in enumerate(drevs):
1703 1703 if i + 1 == len(drevs) and opts.get(b'comment'):
1704 1704 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1705 1705 if actions:
1706 1706 params = {
1707 1707 b'objectIdentifier': drev[b'phid'],
1708 1708 b'transactions': actions,
1709 1709 }
1710 1710 callconduit(ui, b'differential.revision.edit', params)
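# A sketch of typical invocations (D123 is a hypothetical revision):
#
#   $ hg phabupdate --accept D123 -m 'Looks good to me'
#   $ hg phabupdate --abandon ':D123'    # abandon the stack up to D123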
1711 1711
1712 1712
1713 1713 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1714 1714 def template_review(context, mapping):
1715 1715 """:phabreview: Object describing the review for this changeset.
1716 1716 Has attributes `url` and `id`.
1717 1717 """
1718 1718 ctx = context.resource(mapping, b'ctx')
1719 1719 m = _differentialrevisiondescre.search(ctx.description())
1720 1720 if m:
1721 1721 return templateutil.hybriddict(
1722 1722 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1723 1723 )
1724 1724 else:
1725 1725 tags = ctx.repo().nodetags(ctx.node())
1726 1726 for t in tags:
1727 1727 if _differentialrevisiontagre.match(t):
1728 1728 url = ctx.repo().ui.config(b'phabricator', b'url')
1729 1729 if not url.endswith(b'/'):
1730 1730 url += b'/'
1731 1731 url += t
1732 1732
1733 1733 return templateutil.hybriddict({b'url': url, b'id': t,})
1734 1734 return None
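# A sketch of how the keyword above can be used from the command line, reading
# the url and id members of the returned object:
#
#   $ hg log -r . -T '{phabreview.id} {phabreview.url}\n'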
1735 1735
1736 1736
1737 1737 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1738 1738 def template_status(context, mapping):
1739 1739 """:phabstatus: String. Status of Phabricator differential.
1740 1740 """
1741 1741 ctx = context.resource(mapping, b'ctx')
1742 1742 repo = context.resource(mapping, b'repo')
1743 1743 ui = context.resource(mapping, b'ui')
1744 1744
1745 1745 rev = ctx.rev()
1746 1746 try:
1747 1747 drevid = getdrevmap(repo, [rev])[rev]
1748 1748 except KeyError:
1749 1749 return None
1750 1750 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1751 1751 for drev in drevs:
1752 1752 if int(drev[b'id']) == drevid:
1753 1753 return templateutil.hybriddict(
1754 1754 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1755 1755 )
1756 1756 return None
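# A sketch of how the keyword above can be used from the command line (the
# status and url members mirror the hybriddict returned above):
#
#   $ hg log -r . -T '{phabstatus.status} {phabstatus.url}\n'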
1757 1757
1758 1758
1759 1759 @show.showview(b'phabstatus', csettopic=b'work')
1760 1760 def phabstatusshowview(ui, repo, displayer):
1761 1761 """Phabricator differiential status"""
1762 1762 revs = repo.revs('sort(_underway(), topo)')
1763 1763 drevmap = getdrevmap(repo, revs)
1764 1764 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1765 1765 for rev, drevid in pycompat.iteritems(drevmap):
1766 1766 if drevid is not None:
1767 1767 drevids.add(drevid)
1768 1768 revsbydrevid.setdefault(drevid, set([])).add(rev)
1769 1769 else:
1770 1770 unknownrevs.append(rev)
1771 1771
1772 1772 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1773 1773 drevsbyrev = {}
1774 1774 for drev in drevs:
1775 1775 for rev in revsbydrevid[int(drev[b'id'])]:
1776 1776 drevsbyrev[rev] = drev
1777 1777
1778 1778 def phabstatus(ctx):
1779 1779 drev = drevsbyrev[ctx.rev()]
1780 1780 status = ui.label(
1781 1781 b'%(statusName)s' % drev,
1782 1782 b'phabricator.status.%s' % _getstatusname(drev),
1783 1783 )
1784 1784 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1785 1785
1786 1786 revs -= smartset.baseset(unknownrevs)
1787 1787 revdag = graphmod.dagwalker(repo, revs)
1788 1788
1789 1789 ui.setconfig(b'experimental', b'graphshorten', True)
1790 1790 displayer._exthook = phabstatus
1791 1791 nodelen = show.longestshortest(repo, revs)
1792 1792 logcmdutil.displaygraph(
1793 1793 ui,
1794 1794 repo,
1795 1795 revdag,
1796 1796 displayer,
1797 1797 graphmod.asciiedges,
1798 1798 props={b'nodelen': nodelen},
1799 1799 )
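# The view registered above is reachable through :hg:`show`, e.g.:
#
#   $ hg show phabstatus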