phabricator: refactor `phabread` to write all patches at once...
Matt Harbison
r44909:d5d262c7 default
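
The refactor below changes `readpatch` so it no longer invokes the `write` callback once per Differential Revision: it now collects `(drev id, patch text)` pairs and hands the whole list to the callback in a single call, with `phabread` supplying a small `_write` helper that prints each patch. A minimal sketch of the new callback contract follows, using hypothetical stand-ins (`readpatch_sketch`, `fetchpatch`) rather than the real Conduit calls:

# Sketch of the callback contract this change introduces (illustrative only;
# fetchpatch stands in for the differential.getrawdiff call plus the
# "# HG changeset patch" header generation done by the real readpatch).
def fetchpatch(drev):
    # Stand-in: return some patch text for the given Differential Revision.
    return b'# HG changeset patch\n... patch body for D%s ...\n' % drev[b'id']

def readpatch_sketch(drevs, write):
    """Collect every patch first, then hand them all to ``write`` at once."""
    patches = []
    for drev in drevs:
        content = fetchpatch(drev)
        patches.append((drev[b'id'], content))
    write(patches)  # single call with the full list, as in the new readpatch

def _write(patches):
    # What phabread now passes as the callback: emit each patch in order.
    for drevid, content in patches:
        print(content.decode())

readpatch_sketch([{b'id': b'1'}, {b'id': b'2'}], _write)

Callers now receive the complete set of patches before anything is written, which is the behavioural difference from the previous per-revision `write(content)` calls.
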
@@ -1,1803 +1,1815 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan`` which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config on the server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
93 93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 95 # be specifying the version(s) of Mercurial they are tested with, or
96 96 # leave the attribute unspecified.
97 97 testedwith = b'ships-with-hg-core'
98 98
99 99 eh = exthelper.exthelper()
100 100
101 101 cmdtable = eh.cmdtable
102 102 command = eh.command
103 103 configtable = eh.configtable
104 104 templatekeyword = eh.templatekeyword
105 105 uisetup = eh.finaluisetup
106 106
107 107 # developer config: phabricator.batchsize
108 108 eh.configitem(
109 109 b'phabricator', b'batchsize', default=12,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'callsign', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabricator', b'curlcmd', default=None,
116 116 )
117 117 # developer config: phabricator.repophid
118 118 eh.configitem(
119 119 b'phabricator', b'repophid', default=None,
120 120 )
121 121 eh.configitem(
122 122 b'phabricator', b'url', default=None,
123 123 )
124 124 eh.configitem(
125 125 b'phabsend', b'confirm', default=False,
126 126 )
127 127
128 128 colortable = {
129 129 b'phabricator.action.created': b'green',
130 130 b'phabricator.action.skipped': b'magenta',
131 131 b'phabricator.action.updated': b'magenta',
132 132 b'phabricator.desc': b'',
133 133 b'phabricator.drev': b'bold',
134 134 b'phabricator.node': b'',
135 135 b'phabricator.status.abandoned': b'magenta dim',
136 136 b'phabricator.status.accepted': b'green bold',
137 137 b'phabricator.status.closed': b'green',
138 138 b'phabricator.status.needsreview': b'yellow',
139 139 b'phabricator.status.needsrevision': b'red',
140 140 b'phabricator.status.changesplanned': b'red',
141 141 }
142 142
143 143 _VCR_FLAGS = [
144 144 (
145 145 b'',
146 146 b'test-vcr',
147 147 b'',
148 148 _(
149 149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 150 b', otherwise will mock all http requests using the specified vcr file.'
151 151 b' (ADVANCED)'
152 152 ),
153 153 ),
154 154 ]
155 155
156 156
157 157 @eh.wrapfunction(localrepo, "loadhgrc")
158 158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 160 """
161 161 result = False
162 162 arcconfig = {}
163 163
164 164 try:
165 165 # json.loads only accepts bytes from 3.6+
166 166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 167 # json.loads only returns unicode strings
168 168 arcconfig = pycompat.rapply(
169 169 lambda x: encoding.unitolocal(x)
170 170 if isinstance(x, pycompat.unicode)
171 171 else x,
172 172 pycompat.json_loads(rawparams),
173 173 )
174 174
175 175 result = True
176 176 except ValueError:
177 177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 178 except IOError:
179 179 pass
180 180
181 181 cfg = util.sortdict()
182 182
183 183 if b"repository.callsign" in arcconfig:
184 184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
185 185
186 186 if b"phabricator.uri" in arcconfig:
187 187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188 188
189 189 if cfg:
190 190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
191 191
192 192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
193 193
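# For illustration, a hypothetical ``.arcconfig`` that the wrapper above would
# pick up on repository open (only these two keys are read; the values mirror
# the config example in the module docstring):
#   {
#     "phabricator.uri": "https://phab.example.com/",
#     "repository.callsign": "FOO"
#   }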
194 194
195 195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
196 196 fullflags = flags + _VCR_FLAGS
197 197
198 198 def hgmatcher(r1, r2):
199 199 if r1.uri != r2.uri or r1.method != r2.method:
200 200 return False
201 201 r1params = util.urlreq.parseqs(r1.body)
202 202 r2params = util.urlreq.parseqs(r2.body)
203 203 for key in r1params:
204 204 if key not in r2params:
205 205 return False
206 206 value = r1params[key][0]
207 207 # we want to compare json payloads without worrying about ordering
208 208 if value.startswith(b'{') and value.endswith(b'}'):
209 209 r1json = pycompat.json_loads(value)
210 210 r2json = pycompat.json_loads(r2params[key][0])
211 211 if r1json != r2json:
212 212 return False
213 213 elif r2params[key][0] != value:
214 214 return False
215 215 return True
216 216
217 217 def sanitiserequest(request):
218 218 request.body = re.sub(
219 219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
220 220 )
221 221 return request
222 222
223 223 def sanitiseresponse(response):
224 224 if 'set-cookie' in response['headers']:
225 225 del response['headers']['set-cookie']
226 226 return response
227 227
228 228 def decorate(fn):
229 229 def inner(*args, **kwargs):
230 230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
231 231 if cassette:
232 232 import hgdemandimport
233 233
234 234 with hgdemandimport.deactivated():
235 235 import vcr as vcrmod
236 236 import vcr.stubs as stubs
237 237
238 238 vcr = vcrmod.VCR(
239 239 serializer='json',
240 240 before_record_request=sanitiserequest,
241 241 before_record_response=sanitiseresponse,
242 242 custom_patches=[
243 243 (
244 244 urlmod,
245 245 'httpconnection',
246 246 stubs.VCRHTTPConnection,
247 247 ),
248 248 (
249 249 urlmod,
250 250 'httpsconnection',
251 251 stubs.VCRHTTPSConnection,
252 252 ),
253 253 ],
254 254 )
255 255 vcr.register_matcher('hgmatcher', hgmatcher)
256 256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
257 257 return fn(*args, **kwargs)
258 258 return fn(*args, **kwargs)
259 259
260 260 inner.__name__ = fn.__name__
261 261 inner.__doc__ = fn.__doc__
262 262 return command(
263 263 name,
264 264 fullflags,
265 265 spec,
266 266 helpcategory=helpcategory,
267 267 optionalrepo=optionalrepo,
268 268 )(inner)
269 269
270 270 return decorate
271 271
272 272
273 273 def urlencodenested(params):
274 274 """like urlencode, but works with nested parameters.
275 275
276 276 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
277 277 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
278 278 urlencode. Note: the encoding is consistent with PHP's http_build_query.
279 279 """
280 280 flatparams = util.sortdict()
281 281
282 282 def process(prefix, obj):
283 283 if isinstance(obj, bool):
284 284 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
285 285 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
286 286 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
287 287 if items is None:
288 288 flatparams[prefix] = obj
289 289 else:
290 290 for k, v in items(obj):
291 291 if prefix:
292 292 process(b'%s[%s]' % (prefix, k), v)
293 293 else:
294 294 process(k, v)
295 295
296 296 process(b'', params)
297 297 return util.urlreq.urlencode(flatparams)
298 298
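# A quick illustration of the flattening described in the docstring above
# (hypothetical input; the exact escaping comes from util.urlreq.urlencode):
#   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
# first flattens to {b'a[0]': b'b', b'a[1]': b'c', b'd[e]': b'f'} and then
# URL-encodes it, yielding something like a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f.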
299 299
300 300 def readurltoken(ui):
301 301 """return conduit url, token and make sure they exist
302 302
303 303 Currently read from [auth] config section. In the future, it might
304 304 make sense to read from .arcconfig and .arcrc as well.
305 305 """
306 306 url = ui.config(b'phabricator', b'url')
307 307 if not url:
308 308 raise error.Abort(
309 309 _(b'config %s.%s is required') % (b'phabricator', b'url')
310 310 )
311 311
312 312 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
313 313 token = None
314 314
315 315 if res:
316 316 group, auth = res
317 317
318 318 ui.debug(b"using auth.%s.* for authentication\n" % group)
319 319
320 320 token = auth.get(b'phabtoken')
321 321
322 322 if not token:
323 323 raise error.Abort(
324 324 _(b'Can\'t find conduit token associated to %s') % (url,)
325 325 )
326 326
327 327 return url, token
328 328
329 329
330 330 def callconduit(ui, name, params):
331 331 """call Conduit API, params is a dict. return json.loads result, or None"""
332 332 host, token = readurltoken(ui)
333 333 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
334 334 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
335 335 params = params.copy()
336 336 params[b'__conduit__'] = {
337 337 b'token': token,
338 338 }
339 339 rawdata = {
340 340 b'params': templatefilters.json(params),
341 341 b'output': b'json',
342 342 b'__conduit__': 1,
343 343 }
344 344 data = urlencodenested(rawdata)
345 345 curlcmd = ui.config(b'phabricator', b'curlcmd')
346 346 if curlcmd:
347 347 sin, sout = procutil.popen2(
348 348 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
349 349 )
350 350 sin.write(data)
351 351 sin.close()
352 352 body = sout.read()
353 353 else:
354 354 urlopener = urlmod.opener(ui, authinfo)
355 355 request = util.urlreq.request(pycompat.strurl(url), data=data)
356 356 with contextlib.closing(urlopener.open(request)) as rsp:
357 357 body = rsp.read()
358 358 ui.debug(b'Conduit Response: %s\n' % body)
359 359 parsed = pycompat.rapply(
360 360 lambda x: encoding.unitolocal(x)
361 361 if isinstance(x, pycompat.unicode)
362 362 else x,
363 363 # json.loads only accepts bytes from py3.6+
364 364 pycompat.json_loads(encoding.unifromlocal(body)),
365 365 )
366 366 if parsed.get(b'error_code'):
367 367 msg = _(b'Conduit Error (%s): %s') % (
368 368 parsed[b'error_code'],
369 369 parsed[b'error_info'],
370 370 )
371 371 raise error.Abort(msg)
372 372 return parsed[b'result']
373 373
374 374
375 375 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
376 376 def debugcallconduit(ui, repo, name):
377 377 """call Conduit API
378 378
379 379 Call parameters are read from stdin as a JSON blob. Result will be written
380 380 to stdout as a JSON blob.
381 381 """
382 382 # json.loads only accepts bytes from 3.6+
383 383 rawparams = encoding.unifromlocal(ui.fin.read())
384 384 # json.loads only returns unicode strings
385 385 params = pycompat.rapply(
386 386 lambda x: encoding.unitolocal(x)
387 387 if isinstance(x, pycompat.unicode)
388 388 else x,
389 389 pycompat.json_loads(rawparams),
390 390 )
391 391 # json.dumps only accepts unicode strings
392 392 result = pycompat.rapply(
393 393 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
394 394 callconduit(ui, name, params),
395 395 )
396 396 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
397 397 ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
400 400 def getrepophid(repo):
401 401 """given callsign, return repository PHID or None"""
402 402 # developer config: phabricator.repophid
403 403 repophid = repo.ui.config(b'phabricator', b'repophid')
404 404 if repophid:
405 405 return repophid
406 406 callsign = repo.ui.config(b'phabricator', b'callsign')
407 407 if not callsign:
408 408 return None
409 409 query = callconduit(
410 410 repo.ui,
411 411 b'diffusion.repository.search',
412 412 {b'constraints': {b'callsigns': [callsign]}},
413 413 )
414 414 if len(query[b'data']) == 0:
415 415 return None
416 416 repophid = query[b'data'][0][b'phid']
417 417 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
418 418 return repophid
419 419
420 420
421 421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
422 422 _differentialrevisiondescre = re.compile(
423 423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
424 424 )
425 425
426 426
427 427 def getoldnodedrevmap(repo, nodelist):
428 428 """find previous nodes that has been sent to Phabricator
429 429
430 430 return {node: (oldnode, Differential diff, Differential Revision ID)}
431 431 for node in nodelist with known previously sent versions, or associated
432 432 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
433 433 be ``None``.
434 434
435 435 Examines commit messages like "Differential Revision:" to get the
436 436 association information.
437 437
438 438 If no such commit message line is found, examine all precursors and their
439 439 tags. Tags formatted like "D1234" are considered a match; the node
440 440 with that tag and the number after "D" (e.g. 1234) will be returned.
441 441
442 442 The ``old node``, if not None, is guaranteed to be the last diff of
443 443 the corresponding Differential Revision, and to exist in the repo.
444 444 """
445 445 unfi = repo.unfiltered()
446 446 has_node = unfi.changelog.index.has_node
447 447
448 448 result = {} # {node: (oldnode?, lastdiff?, drev)}
449 449 toconfirm = {} # {node: (force, {precnode}, drev)}
450 450 for node in nodelist:
451 451 ctx = unfi[node]
452 452 # For tags like "D123", put them into "toconfirm" to verify later
453 453 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
454 454 for n in precnodes:
455 455 if has_node(n):
456 456 for tag in unfi.nodetags(n):
457 457 m = _differentialrevisiontagre.match(tag)
458 458 if m:
459 459 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
460 460 break
461 461 else:
462 462 continue # move to next predecessor
463 463 break # found a tag, stop
464 464 else:
465 465 # Check commit message
466 466 m = _differentialrevisiondescre.search(ctx.description())
467 467 if m:
468 468 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
469 469
470 470 # Double check if tags are genuine by collecting all old nodes from
471 471 # Phabricator, and expect precursors overlap with it.
472 472 if toconfirm:
473 473 drevs = [drev for force, precs, drev in toconfirm.values()]
474 474 alldiffs = callconduit(
475 475 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
476 476 )
477 477 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
478 478 for newnode, (force, precset, drev) in toconfirm.items():
479 479 diffs = [
480 480 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
481 481 ]
482 482
483 483 # "precursors" as known by Phabricator
484 484 phprecset = set(getnode(d) for d in diffs)
485 485
486 486 # Ignore if precursors (Phabricator and local repo) do not overlap,
487 487 # and force is not set (when commit message says nothing)
488 488 if not force and not bool(phprecset & precset):
489 489 tagname = b'D%d' % drev
490 490 tags.tag(
491 491 repo,
492 492 tagname,
493 493 nullid,
494 494 message=None,
495 495 user=None,
496 496 date=None,
497 497 local=True,
498 498 )
499 499 unfi.ui.warn(
500 500 _(
501 501 b'D%d: local tag removed - does not match '
502 502 b'Differential history\n'
503 503 )
504 504 % drev
505 505 )
506 506 continue
507 507
508 508 # Find the last node using Phabricator metadata, and make sure it
509 509 # exists in the repo
510 510 oldnode = lastdiff = None
511 511 if diffs:
512 512 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
513 513 oldnode = getnode(lastdiff)
514 514 if oldnode and not has_node(oldnode):
515 515 oldnode = None
516 516
517 517 result[newnode] = (oldnode, lastdiff, drev)
518 518
519 519 return result
520 520
521 521
522 522 def getdrevmap(repo, revs):
523 523 """Return a dict mapping each rev in `revs` to their Differential Revision
524 524 ID or None.
525 525 """
526 526 result = {}
527 527 for rev in revs:
528 528 result[rev] = None
529 529 ctx = repo[rev]
530 530 # Check commit message
531 531 m = _differentialrevisiondescre.search(ctx.description())
532 532 if m:
533 533 result[rev] = int(m.group('id'))
534 534 continue
535 535 # Check tags
536 536 for tag in repo.nodetags(ctx.node()):
537 537 m = _differentialrevisiontagre.match(tag)
538 538 if m:
539 539 result[rev] = int(m.group(1))
540 540 break
541 541
542 542 return result
543 543
544 544
545 545 def getdiff(ctx, diffopts):
546 546 """plain-text diff without header (user, commit message, etc)"""
547 547 output = util.stringio()
548 548 for chunk, _label in patch.diffui(
549 549 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
550 550 ):
551 551 output.write(chunk)
552 552 return output.getvalue()
553 553
554 554
555 555 class DiffChangeType(object):
556 556 ADD = 1
557 557 CHANGE = 2
558 558 DELETE = 3
559 559 MOVE_AWAY = 4
560 560 COPY_AWAY = 5
561 561 MOVE_HERE = 6
562 562 COPY_HERE = 7
563 563 MULTICOPY = 8
564 564
565 565
566 566 class DiffFileType(object):
567 567 TEXT = 1
568 568 IMAGE = 2
569 569 BINARY = 3
570 570
571 571
572 572 @attr.s
573 573 class phabhunk(dict):
574 574 """Represents a Differential hunk, which is owned by a Differential change
575 575 """
576 576
577 577 oldOffset = attr.ib(default=0) # camelcase-required
578 578 oldLength = attr.ib(default=0) # camelcase-required
579 579 newOffset = attr.ib(default=0) # camelcase-required
580 580 newLength = attr.ib(default=0) # camelcase-required
581 581 corpus = attr.ib(default='')
582 582 # These get added to the phabchange's equivalents
583 583 addLines = attr.ib(default=0) # camelcase-required
584 584 delLines = attr.ib(default=0) # camelcase-required
585 585
586 586
587 587 @attr.s
588 588 class phabchange(object):
589 589 """Represents a Differential change, owns Differential hunks and owned by a
590 590 Differential diff. Each one represents one file in a diff.
591 591 """
592 592
593 593 currentPath = attr.ib(default=None) # camelcase-required
594 594 oldPath = attr.ib(default=None) # camelcase-required
595 595 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
596 596 metadata = attr.ib(default=attr.Factory(dict))
597 597 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
598 598 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
599 599 type = attr.ib(default=DiffChangeType.CHANGE)
600 600 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
601 601 commitHash = attr.ib(default=None) # camelcase-required
602 602 addLines = attr.ib(default=0) # camelcase-required
603 603 delLines = attr.ib(default=0) # camelcase-required
604 604 hunks = attr.ib(default=attr.Factory(list))
605 605
606 606 def copynewmetadatatoold(self):
607 607 for key in list(self.metadata.keys()):
608 608 newkey = key.replace(b'new:', b'old:')
609 609 self.metadata[newkey] = self.metadata[key]
610 610
611 611 def addoldmode(self, value):
612 612 self.oldProperties[b'unix:filemode'] = value
613 613
614 614 def addnewmode(self, value):
615 615 self.newProperties[b'unix:filemode'] = value
616 616
617 617 def addhunk(self, hunk):
618 618 if not isinstance(hunk, phabhunk):
619 619 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
620 620 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
621 621 # It's useful to include these stats since the Phab web UI shows them,
622 622 # and uses them to estimate how large a change a Revision is. Also used
623 623 # in email subjects for the [+++--] bit.
624 624 self.addLines += hunk.addLines
625 625 self.delLines += hunk.delLines
626 626
627 627
628 628 @attr.s
629 629 class phabdiff(object):
630 630 """Represents a Differential diff, owns Differential changes. Corresponds
631 631 to a commit.
632 632 """
633 633
634 634 # Doesn't seem to be any reason to send this (output of uname -n)
635 635 sourceMachine = attr.ib(default=b'') # camelcase-required
636 636 sourcePath = attr.ib(default=b'/') # camelcase-required
637 637 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
638 638 sourceControlPath = attr.ib(default=b'/') # camelcase-required
639 639 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
640 640 branch = attr.ib(default=b'default')
641 641 bookmark = attr.ib(default=None)
642 642 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
643 643 lintStatus = attr.ib(default=b'none') # camelcase-required
644 644 unitStatus = attr.ib(default=b'none') # camelcase-required
645 645 changes = attr.ib(default=attr.Factory(dict))
646 646 repositoryPHID = attr.ib(default=None) # camelcase-required
647 647
648 648 def addchange(self, change):
649 649 if not isinstance(change, phabchange):
650 650 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
651 651 self.changes[change.currentPath] = pycompat.byteskwargs(
652 652 attr.asdict(change)
653 653 )
654 654
655 655
656 656 def maketext(pchange, ctx, fname):
657 657 """populate the phabchange for a text file"""
658 658 repo = ctx.repo()
659 659 fmatcher = match.exact([fname])
660 660 diffopts = mdiff.diffopts(git=True, context=32767)
661 661 _pfctx, _fctx, header, fhunks = next(
662 662 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
663 663 )
664 664
665 665 for fhunk in fhunks:
666 666 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
667 667 corpus = b''.join(lines[1:])
668 668 shunk = list(header)
669 669 shunk.extend(lines)
670 670 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
671 671 patch.diffstatdata(util.iterlines(shunk))
672 672 )
673 673 pchange.addhunk(
674 674 phabhunk(
675 675 oldOffset,
676 676 oldLength,
677 677 newOffset,
678 678 newLength,
679 679 corpus,
680 680 addLines,
681 681 delLines,
682 682 )
683 683 )
684 684
685 685
686 686 def uploadchunks(fctx, fphid):
687 687 """upload large binary files as separate chunks.
688 688 Phab requests chunking for files over 8MiB, and splits them into 4MiB chunks
689 689 """
690 690 ui = fctx.repo().ui
691 691 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
692 692 with ui.makeprogress(
693 693 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
694 694 ) as progress:
695 695 for chunk in chunks:
696 696 progress.increment()
697 697 if chunk[b'complete']:
698 698 continue
699 699 bstart = int(chunk[b'byteStart'])
700 700 bend = int(chunk[b'byteEnd'])
701 701 callconduit(
702 702 ui,
703 703 b'file.uploadchunk',
704 704 {
705 705 b'filePHID': fphid,
706 706 b'byteStart': bstart,
707 707 b'data': base64.b64encode(fctx.data()[bstart:bend]),
708 708 b'dataEncoding': b'base64',
709 709 },
710 710 )
711 711
712 712
713 713 def uploadfile(fctx):
714 714 """upload binary files to Phabricator"""
715 715 repo = fctx.repo()
716 716 ui = repo.ui
717 717 fname = fctx.path()
718 718 size = fctx.size()
719 719 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
720 720
721 721 # an allocate call is required first to see if an upload is even required
722 722 # (Phab might already have it) and to determine if chunking is needed
723 723 allocateparams = {
724 724 b'name': fname,
725 725 b'contentLength': size,
726 726 b'contentHash': fhash,
727 727 }
728 728 filealloc = callconduit(ui, b'file.allocate', allocateparams)
729 729 fphid = filealloc[b'filePHID']
730 730
731 731 if filealloc[b'upload']:
732 732 ui.write(_(b'uploading %s\n') % bytes(fctx))
733 733 if not fphid:
734 734 uploadparams = {
735 735 b'name': fname,
736 736 b'data_base64': base64.b64encode(fctx.data()),
737 737 }
738 738 fphid = callconduit(ui, b'file.upload', uploadparams)
739 739 else:
740 740 uploadchunks(fctx, fphid)
741 741 else:
742 742 ui.debug(b'server already has %s\n' % bytes(fctx))
743 743
744 744 if not fphid:
745 745 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
746 746
747 747 return fphid
748 748
749 749
750 750 def addoldbinary(pchange, fctx):
751 751 """add the metadata for the previous version of a binary file to the
752 752 phabchange for the new version
753 753 """
754 754 oldfctx = fctx.p1()
755 755 if fctx.cmp(oldfctx):
756 756 # Files differ, add the old one
757 757 pchange.metadata[b'old:file:size'] = oldfctx.size()
758 758 mimeguess, _enc = mimetypes.guess_type(
759 759 encoding.unifromlocal(oldfctx.path())
760 760 )
761 761 if mimeguess:
762 762 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
763 763 mimeguess
764 764 )
765 765 fphid = uploadfile(oldfctx)
766 766 pchange.metadata[b'old:binary-phid'] = fphid
767 767 else:
768 768 # If it's left as IMAGE/BINARY web UI might try to display it
769 769 pchange.fileType = DiffFileType.TEXT
770 770 pchange.copynewmetadatatoold()
771 771
772 772
773 773 def makebinary(pchange, fctx):
774 774 """populate the phabchange for a binary file"""
775 775 pchange.fileType = DiffFileType.BINARY
776 776 fphid = uploadfile(fctx)
777 777 pchange.metadata[b'new:binary-phid'] = fphid
778 778 pchange.metadata[b'new:file:size'] = fctx.size()
779 779 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
780 780 if mimeguess:
781 781 mimeguess = pycompat.bytestr(mimeguess)
782 782 pchange.metadata[b'new:file:mime-type'] = mimeguess
783 783 if mimeguess.startswith(b'image/'):
784 784 pchange.fileType = DiffFileType.IMAGE
785 785
786 786
787 787 # Copied from mercurial/patch.py
788 788 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789 789
790 790
791 791 def notutf8(fctx):
792 792 """detect non-UTF-8 text files since Phabricator requires them to be marked
793 793 as binary
794 794 """
795 795 try:
796 796 fctx.data().decode('utf-8')
797 797 if fctx.parents():
798 798 fctx.p1().data().decode('utf-8')
799 799 return False
800 800 except UnicodeDecodeError:
801 801 fctx.repo().ui.write(
802 802 _(b'file %s detected as non-UTF-8, marked as binary\n')
803 803 % fctx.path()
804 804 )
805 805 return True
806 806
807 807
808 808 def addremoved(pdiff, ctx, removed):
809 809 """add removed files to the phabdiff. Shouldn't include moves"""
810 810 for fname in removed:
811 811 pchange = phabchange(
812 812 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
813 813 )
814 814 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
815 815 fctx = ctx.p1()[fname]
816 816 if not (fctx.isbinary() or notutf8(fctx)):
817 817 maketext(pchange, ctx, fname)
818 818
819 819 pdiff.addchange(pchange)
820 820
821 821
822 822 def addmodified(pdiff, ctx, modified):
823 823 """add modified files to the phabdiff"""
824 824 for fname in modified:
825 825 fctx = ctx[fname]
826 826 pchange = phabchange(currentPath=fname, oldPath=fname)
827 827 filemode = gitmode[ctx[fname].flags()]
828 828 originalmode = gitmode[ctx.p1()[fname].flags()]
829 829 if filemode != originalmode:
830 830 pchange.addoldmode(originalmode)
831 831 pchange.addnewmode(filemode)
832 832
833 833 if fctx.isbinary() or notutf8(fctx):
834 834 makebinary(pchange, fctx)
835 835 addoldbinary(pchange, fctx)
836 836 else:
837 837 maketext(pchange, ctx, fname)
838 838
839 839 pdiff.addchange(pchange)
840 840
841 841
842 842 def addadded(pdiff, ctx, added, removed):
843 843 """add file adds to the phabdiff, both new files and copies/moves"""
844 844 # Keep track of files that've been recorded as moved/copied, so if there are
845 845 # additional copies we can mark them (moves get removed from removed)
846 846 copiedchanges = {}
847 847 movedchanges = {}
848 848 for fname in added:
849 849 fctx = ctx[fname]
850 850 pchange = phabchange(currentPath=fname)
851 851
852 852 filemode = gitmode[ctx[fname].flags()]
853 853 renamed = fctx.renamed()
854 854
855 855 if renamed:
856 856 originalfname = renamed[0]
857 857 originalmode = gitmode[ctx.p1()[originalfname].flags()]
858 858 pchange.oldPath = originalfname
859 859
860 860 if originalfname in removed:
861 861 origpchange = phabchange(
862 862 currentPath=originalfname,
863 863 oldPath=originalfname,
864 864 type=DiffChangeType.MOVE_AWAY,
865 865 awayPaths=[fname],
866 866 )
867 867 movedchanges[originalfname] = origpchange
868 868 removed.remove(originalfname)
869 869 pchange.type = DiffChangeType.MOVE_HERE
870 870 elif originalfname in movedchanges:
871 871 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
872 872 movedchanges[originalfname].awayPaths.append(fname)
873 873 pchange.type = DiffChangeType.COPY_HERE
874 874 else: # pure copy
875 875 if originalfname not in copiedchanges:
876 876 origpchange = phabchange(
877 877 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
878 878 )
879 879 copiedchanges[originalfname] = origpchange
880 880 else:
881 881 origpchange = copiedchanges[originalfname]
882 882 origpchange.awayPaths.append(fname)
883 883 pchange.type = DiffChangeType.COPY_HERE
884 884
885 885 if filemode != originalmode:
886 886 pchange.addoldmode(originalmode)
887 887 pchange.addnewmode(filemode)
888 888 else: # Brand-new file
889 889 pchange.addnewmode(gitmode[fctx.flags()])
890 890 pchange.type = DiffChangeType.ADD
891 891
892 892 if fctx.isbinary() or notutf8(fctx):
893 893 makebinary(pchange, fctx)
894 894 if renamed:
895 895 addoldbinary(pchange, fctx)
896 896 else:
897 897 maketext(pchange, ctx, fname)
898 898
899 899 pdiff.addchange(pchange)
900 900
901 901 for _path, copiedchange in copiedchanges.items():
902 902 pdiff.addchange(copiedchange)
903 903 for _path, movedchange in movedchanges.items():
904 904 pdiff.addchange(movedchange)
905 905
906 906
907 907 def creatediff(ctx):
908 908 """create a Differential Diff"""
909 909 repo = ctx.repo()
910 910 repophid = getrepophid(repo)
911 911 # Create a "Differential Diff" via "differential.creatediff" API
912 912 pdiff = phabdiff(
913 913 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
914 914 branch=b'%s' % ctx.branch(),
915 915 )
916 916 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
917 917 # addadded will remove moved files from removed, so addremoved won't get
918 918 # them
919 919 addadded(pdiff, ctx, added, removed)
920 920 addmodified(pdiff, ctx, modified)
921 921 addremoved(pdiff, ctx, removed)
922 922 if repophid:
923 923 pdiff.repositoryPHID = repophid
924 924 diff = callconduit(
925 925 repo.ui,
926 926 b'differential.creatediff',
927 927 pycompat.byteskwargs(attr.asdict(pdiff)),
928 928 )
929 929 if not diff:
930 930 raise error.Abort(_(b'cannot create diff for %s') % ctx)
931 931 return diff
932 932
933 933
934 934 def writediffproperties(ctx, diff):
935 935 """write metadata to diff so patches could be applied losslessly"""
936 936 # creatediff returns with a diffid but query returns with an id
937 937 diffid = diff.get(b'diffid', diff.get(b'id'))
938 938 params = {
939 939 b'diff_id': diffid,
940 940 b'name': b'hg:meta',
941 941 b'data': templatefilters.json(
942 942 {
943 943 b'user': ctx.user(),
944 944 b'date': b'%d %d' % ctx.date(),
945 945 b'branch': ctx.branch(),
946 946 b'node': ctx.hex(),
947 947 b'parent': ctx.p1().hex(),
948 948 }
949 949 ),
950 950 }
951 951 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
952 952
953 953 params = {
954 954 b'diff_id': diffid,
955 955 b'name': b'local:commits',
956 956 b'data': templatefilters.json(
957 957 {
958 958 ctx.hex(): {
959 959 b'author': stringutil.person(ctx.user()),
960 960 b'authorEmail': stringutil.email(ctx.user()),
961 961 b'time': int(ctx.date()[0]),
962 962 b'commit': ctx.hex(),
963 963 b'parents': [ctx.p1().hex()],
964 964 b'branch': ctx.branch(),
965 965 },
966 966 }
967 967 ),
968 968 }
969 969 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
970 970
971 971
972 972 def createdifferentialrevision(
973 973 ctx,
974 974 revid=None,
975 975 parentrevphid=None,
976 976 oldnode=None,
977 977 olddiff=None,
978 978 actions=None,
979 979 comment=None,
980 980 ):
981 981 """create or update a Differential Revision
982 982
983 983 If revid is None, create a new Differential Revision, otherwise update
984 984 revid. If parentrevphid is not None, set it as a dependency.
985 985
986 986 If oldnode is not None, check if the patch content (without commit message
987 987 and metadata) has changed before creating another diff.
988 988
989 989 If actions is not None, they will be appended to the transaction.
990 990 """
991 991 repo = ctx.repo()
992 992 if oldnode:
993 993 diffopts = mdiff.diffopts(git=True, context=32767)
994 994 oldctx = repo.unfiltered()[oldnode]
995 995 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
996 996 else:
997 997 neednewdiff = True
998 998
999 999 transactions = []
1000 1000 if neednewdiff:
1001 1001 diff = creatediff(ctx)
1002 1002 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1003 1003 if comment:
1004 1004 transactions.append({b'type': b'comment', b'value': comment})
1005 1005 else:
1006 1006 # Even if we don't need to upload a new diff because the patch content
1007 1007 # has not changed, we might still need to update its metadata so
1008 1008 # pushers know the correct node metadata.
1009 1009 assert olddiff
1010 1010 diff = olddiff
1011 1011 writediffproperties(ctx, diff)
1012 1012
1013 1013 # Set the parent Revision every time, so commit re-ordering is picked-up
1014 1014 if parentrevphid:
1015 1015 transactions.append(
1016 1016 {b'type': b'parents.set', b'value': [parentrevphid]}
1017 1017 )
1018 1018
1019 1019 if actions:
1020 1020 transactions += actions
1021 1021
1022 1022 # Parse commit message and update related fields.
1023 1023 desc = ctx.description()
1024 1024 info = callconduit(
1025 1025 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1026 1026 )
1027 1027 for k, v in info[b'fields'].items():
1028 1028 if k in [b'title', b'summary', b'testPlan']:
1029 1029 transactions.append({b'type': k, b'value': v})
1030 1030
1031 1031 params = {b'transactions': transactions}
1032 1032 if revid is not None:
1033 1033 # Update an existing Differential Revision
1034 1034 params[b'objectIdentifier'] = revid
1035 1035
1036 1036 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1037 1037 if not revision:
1038 1038 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1039 1039
1040 1040 return revision, diff
1041 1041
1042 1042
1043 1043 def userphids(ui, names):
1044 1044 """convert user names to PHIDs"""
1045 1045 names = [name.lower() for name in names]
1046 1046 query = {b'constraints': {b'usernames': names}}
1047 1047 result = callconduit(ui, b'user.search', query)
1048 1048 # username not found is not an error of the API. So check if we have missed
1049 1049 # some names here.
1050 1050 data = result[b'data']
1051 1051 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
1052 1052 unresolved = set(names) - resolved
1053 1053 if unresolved:
1054 1054 raise error.Abort(
1055 1055 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1056 1056 )
1057 1057 return [entry[b'phid'] for entry in data]
1058 1058
1059 1059
1060 1060 @vcrcommand(
1061 1061 b'phabsend',
1062 1062 [
1063 1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1064 1064 (b'', b'amend', True, _(b'update commit messages')),
1065 1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1066 1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1067 1067 (
1068 1068 b'm',
1069 1069 b'comment',
1070 1070 b'',
1071 1071 _(b'add a comment to Revisions with new/updated Diffs'),
1072 1072 ),
1073 1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1074 1074 ],
1075 1075 _(b'REV [OPTIONS]'),
1076 1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1077 1077 )
1078 1078 def phabsend(ui, repo, *revs, **opts):
1079 1079 """upload changesets to Phabricator
1080 1080
1081 1081 If there are multiple revisions specified, they will be sent as a stack
1082 1082 with a linear dependency relationship, using the order specified by the
1083 1083 revset.
1084 1084
1085 1085 The first time changesets are uploaded, local tags will be created to
1086 1086 maintain the association. After the first time, phabsend will check
1087 1087 obsstore and tags information so it can figure out whether to update an
1088 1088 existing Differential Revision, or create a new one.
1089 1089
1090 1090 If --amend is set, update commit messages so they have the
1091 1091 ``Differential Revision`` URL, and remove related tags. This is similar to what
1092 1092 arcanist will do, and is preferred in author-push workflows. Otherwise,
1093 1093 use local tags to record the ``Differential Revision`` association.
1094 1094
1095 1095 The --confirm option lets you confirm changesets before sending them. You
1096 1096 can also add the following to your configuration file to make it the default
1097 1097 behaviour::
1098 1098
1099 1099 [phabsend]
1100 1100 confirm = true
1101 1101
1102 1102 phabsend will check obsstore and the above association to decide whether to
1103 1103 update an existing Differential Revision, or create a new one.
1104 1104 """
1105 1105 opts = pycompat.byteskwargs(opts)
1106 1106 revs = list(revs) + opts.get(b'rev', [])
1107 1107 revs = scmutil.revrange(repo, revs)
1108 1108 revs.sort() # ascending order to preserve topological parent/child in phab
1109 1109
1110 1110 if not revs:
1111 1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1112 1112 if opts.get(b'amend'):
1113 1113 cmdutil.checkunfinished(repo)
1114 1114
1115 1115 # {newnode: (oldnode, olddiff, olddrev)}
1116 1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1117 1117
1118 1118 confirm = ui.configbool(b'phabsend', b'confirm')
1119 1119 confirm |= bool(opts.get(b'confirm'))
1120 1120 if confirm:
1121 1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1122 1122 if not confirmed:
1123 1123 raise error.Abort(_(b'phabsend cancelled'))
1124 1124
1125 1125 actions = []
1126 1126 reviewers = opts.get(b'reviewer', [])
1127 1127 blockers = opts.get(b'blocker', [])
1128 1128 phids = []
1129 1129 if reviewers:
1130 1130 phids.extend(userphids(repo.ui, reviewers))
1131 1131 if blockers:
1132 1132 phids.extend(
1133 1133 map(
1134 1134 lambda phid: b'blocking(%s)' % phid,
1135 1135 userphids(repo.ui, blockers),
1136 1136 )
1137 1137 )
1138 1138 if phids:
1139 1139 actions.append({b'type': b'reviewers.add', b'value': phids})
1140 1140
1141 1141 drevids = [] # [int]
1142 1142 diffmap = {} # {newnode: diff}
1143 1143
1144 1144 # Send patches one by one so we know their Differential Revision PHIDs and
1145 1145 # can provide dependency relationship
1146 1146 lastrevphid = None
1147 1147 for rev in revs:
1148 1148 ui.debug(b'sending rev %d\n' % rev)
1149 1149 ctx = repo[rev]
1150 1150
1151 1151 # Get Differential Revision ID
1152 1152 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1153 1153 if oldnode != ctx.node() or opts.get(b'amend'):
1154 1154 # Create or update Differential Revision
1155 1155 revision, diff = createdifferentialrevision(
1156 1156 ctx,
1157 1157 revid,
1158 1158 lastrevphid,
1159 1159 oldnode,
1160 1160 olddiff,
1161 1161 actions,
1162 1162 opts.get(b'comment'),
1163 1163 )
1164 1164 diffmap[ctx.node()] = diff
1165 1165 newrevid = int(revision[b'object'][b'id'])
1166 1166 newrevphid = revision[b'object'][b'phid']
1167 1167 if revid:
1168 1168 action = b'updated'
1169 1169 else:
1170 1170 action = b'created'
1171 1171
1172 1172 # Create a local tag to note the association, if commit message
1173 1173 # does not have it already
1174 1174 m = _differentialrevisiondescre.search(ctx.description())
1175 1175 if not m or int(m.group('id')) != newrevid:
1176 1176 tagname = b'D%d' % newrevid
1177 1177 tags.tag(
1178 1178 repo,
1179 1179 tagname,
1180 1180 ctx.node(),
1181 1181 message=None,
1182 1182 user=None,
1183 1183 date=None,
1184 1184 local=True,
1185 1185 )
1186 1186 else:
1187 1187 # Nothing changed. But still set "newrevphid" so the next revision
1188 1188 # could depend on this one and "newrevid" for the summary line.
1189 1189 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1190 1190 newrevid = revid
1191 1191 action = b'skipped'
1192 1192
1193 1193 actiondesc = ui.label(
1194 1194 {
1195 1195 b'created': _(b'created'),
1196 1196 b'skipped': _(b'skipped'),
1197 1197 b'updated': _(b'updated'),
1198 1198 }[action],
1199 1199 b'phabricator.action.%s' % action,
1200 1200 )
1201 1201 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1202 1202 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1203 1203 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1204 1204 ui.write(
1205 1205 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1206 1206 )
1207 1207 drevids.append(newrevid)
1208 1208 lastrevphid = newrevphid
1209 1209
1210 1210 # Update commit messages and remove tags
1211 1211 if opts.get(b'amend'):
1212 1212 unfi = repo.unfiltered()
1213 1213 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1214 1214 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1215 1215 wnode = unfi[b'.'].node()
1216 1216 mapping = {} # {oldnode: [newnode]}
1217 1217 for i, rev in enumerate(revs):
1218 1218 old = unfi[rev]
1219 1219 drevid = drevids[i]
1220 1220 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1221 1221 newdesc = getdescfromdrev(drev)
1222 1222 # Make sure the commit message contains "Differential Revision"
1223 1223 if old.description() != newdesc:
1224 1224 if old.phase() == phases.public:
1225 1225 ui.warn(
1226 1226 _(b"warning: not updating public commit %s\n")
1227 1227 % scmutil.formatchangeid(old)
1228 1228 )
1229 1229 continue
1230 1230 parents = [
1231 1231 mapping.get(old.p1().node(), (old.p1(),))[0],
1232 1232 mapping.get(old.p2().node(), (old.p2(),))[0],
1233 1233 ]
1234 1234 new = context.metadataonlyctx(
1235 1235 repo,
1236 1236 old,
1237 1237 parents=parents,
1238 1238 text=newdesc,
1239 1239 user=old.user(),
1240 1240 date=old.date(),
1241 1241 extra=old.extra(),
1242 1242 )
1243 1243
1244 1244 newnode = new.commit()
1245 1245
1246 1246 mapping[old.node()] = [newnode]
1247 1247 # Update diff property
1248 1248 # If it fails just warn and keep going, otherwise the DREV
1249 1249 # associations will be lost
1250 1250 try:
1251 1251 writediffproperties(unfi[newnode], diffmap[old.node()])
1252 1252 except util.urlerr.urlerror:
1253 1253 ui.warnnoi18n(
1254 1254 b'Failed to update metadata for D%d\n' % drevid
1255 1255 )
1256 1256 # Remove local tags since it's no longer necessary
1257 1257 tagname = b'D%d' % drevid
1258 1258 if tagname in repo.tags():
1259 1259 tags.tag(
1260 1260 repo,
1261 1261 tagname,
1262 1262 nullid,
1263 1263 message=None,
1264 1264 user=None,
1265 1265 date=None,
1266 1266 local=True,
1267 1267 )
1268 1268 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1269 1269 if wnode in mapping:
1270 1270 unfi.setparents(mapping[wnode][0])
1271 1271
1272 1272
1273 1273 # Map from "hg:meta" keys to header understood by "hg import". The order is
1274 1274 # consistent with "hg export" output.
1275 1275 _metanamemap = util.sortdict(
1276 1276 [
1277 1277 (b'user', b'User'),
1278 1278 (b'date', b'Date'),
1279 1279 (b'branch', b'Branch'),
1280 1280 (b'node', b'Node ID'),
1281 1281 (b'parent', b'Parent '),
1282 1282 ]
1283 1283 )
1284 1284
1285 1285
1286 1286 def _confirmbeforesend(repo, revs, oldmap):
1287 1287 url, token = readurltoken(repo.ui)
1288 1288 ui = repo.ui
1289 1289 for rev in revs:
1290 1290 ctx = repo[rev]
1291 1291 desc = ctx.description().splitlines()[0]
1292 1292 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1293 1293 if drevid:
1294 1294 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1295 1295 else:
1296 1296 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1297 1297
1298 1298 ui.write(
1299 1299 _(b'%s - %s: %s\n')
1300 1300 % (
1301 1301 drevdesc,
1302 1302 ui.label(bytes(ctx), b'phabricator.node'),
1303 1303 ui.label(desc, b'phabricator.desc'),
1304 1304 )
1305 1305 )
1306 1306
1307 1307 if ui.promptchoice(
1308 1308 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1309 1309 ):
1310 1310 return False
1311 1311
1312 1312 return True
1313 1313
1314 1314
1315 1315 _knownstatusnames = {
1316 1316 b'accepted',
1317 1317 b'needsreview',
1318 1318 b'needsrevision',
1319 1319 b'closed',
1320 1320 b'abandoned',
1321 1321 b'changesplanned',
1322 1322 }
1323 1323
1324 1324
1325 1325 def _getstatusname(drev):
1326 1326 """get normalized status name from a Differential Revision"""
1327 1327 return drev[b'statusName'].replace(b' ', b'').lower()
1328 1328
1329 1329
1330 1330 # Small language to specify differential revisions. Support symbols: (), :X,
1331 1331 # +, and -.
1332 1332
1333 1333 _elements = {
1334 1334 # token-type: binding-strength, primary, prefix, infix, suffix
1335 1335 b'(': (12, None, (b'group', 1, b')'), None, None),
1336 1336 b':': (8, None, (b'ancestors', 8), None, None),
1337 1337 b'&': (5, None, None, (b'and_', 5), None),
1338 1338 b'+': (4, None, None, (b'add', 4), None),
1339 1339 b'-': (4, None, None, (b'sub', 4), None),
1340 1340 b')': (0, None, None, None, None),
1341 1341 b'symbol': (0, b'symbol', None, None, None),
1342 1342 b'end': (0, None, None, None, None),
1343 1343 }
1344 1344
1345 1345
1346 1346 def _tokenize(text):
1347 1347 view = memoryview(text) # zero-copy slice
1348 1348 special = b'():+-& '
1349 1349 pos = 0
1350 1350 length = len(text)
1351 1351 while pos < length:
1352 1352 symbol = b''.join(
1353 1353 itertools.takewhile(
1354 1354 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1355 1355 )
1356 1356 )
1357 1357 if symbol:
1358 1358 yield (b'symbol', symbol, pos)
1359 1359 pos += len(symbol)
1360 1360 else: # special char, ignore space
1361 1361 if text[pos : pos + 1] != b' ':
1362 1362 yield (text[pos : pos + 1], None, pos)
1363 1363 pos += 1
1364 1364 yield (b'end', None, pos)
1365 1365
1366 1366
1367 1367 def _parse(text):
1368 1368 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1369 1369 if pos != len(text):
1370 1370 raise error.ParseError(b'invalid token', pos)
1371 1371 return tree
1372 1372
1373 1373
1374 1374 def _parsedrev(symbol):
1375 1375 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1376 1376 if symbol.startswith(b'D') and symbol[1:].isdigit():
1377 1377 return int(symbol[1:])
1378 1378 if symbol.isdigit():
1379 1379 return int(symbol)
1380 1380
1381 1381
1382 1382 def _prefetchdrevs(tree):
1383 1383 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1384 1384 drevs = set()
1385 1385 ancestordrevs = set()
1386 1386 op = tree[0]
1387 1387 if op == b'symbol':
1388 1388 r = _parsedrev(tree[1])
1389 1389 if r:
1390 1390 drevs.add(r)
1391 1391 elif op == b'ancestors':
1392 1392 r, a = _prefetchdrevs(tree[1])
1393 1393 drevs.update(r)
1394 1394 ancestordrevs.update(r)
1395 1395 ancestordrevs.update(a)
1396 1396 else:
1397 1397 for t in tree[1:]:
1398 1398 r, a = _prefetchdrevs(t)
1399 1399 drevs.update(r)
1400 1400 ancestordrevs.update(a)
1401 1401 return drevs, ancestordrevs
1402 1402
1403 1403
1404 1404 def querydrev(ui, spec):
1405 1405 """return a list of "Differential Revision" dicts
1406 1406
1407 1407 spec is a string using a simple query language, see docstring in phabread
1408 1408 for details.
1409 1409
1410 1410 A "Differential Revision dict" looks like:
1411 1411
1412 1412 {
1413 1413 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1414 1414 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1415 1415 "auxiliary": {
1416 1416 "phabricator:depends-on": [
1417 1417 "PHID-DREV-gbapp366kutjebt7agcd"
1418 1418 ]
1419 1419 "phabricator:projects": [],
1420 1420 },
1421 1421 "branch": "default",
1422 1422 "ccs": [],
1423 1423 "commits": [],
1424 1424 "dateCreated": "1499181406",
1425 1425 "dateModified": "1499182103",
1426 1426 "diffs": [
1427 1427 "3",
1428 1428 "4",
1429 1429 ],
1430 1430 "hashes": [],
1431 1431 "id": "2",
1432 1432 "lineCount": "2",
1433 1433 "phid": "PHID-DREV-672qvysjcczopag46qty",
1434 1434 "properties": {},
1435 1435 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1436 1436 "reviewers": [],
1437 1437 "sourcePath": null
1438 1438 "status": "0",
1439 1439 "statusName": "Needs Review",
1440 1440 "summary": "",
1441 1441 "testPlan": "",
1442 1442 "title": "example",
1443 1443 "uri": "https://phab.example.com/D2",
1444 1444 }
1445 1445 """
1446 1446 # TODO: replace differential.query and differential.querydiffs with
1447 1447 # differential.diff.search because the former (and their output) are
1448 1448 # frozen, and planned to be deprecated and removed.
1449 1449
1450 1450 def fetch(params):
1451 1451 """params -> single drev or None"""
1452 1452 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1453 1453 if key in prefetched:
1454 1454 return prefetched[key]
1455 1455 drevs = callconduit(ui, b'differential.query', params)
1456 1456 # Fill prefetched with the result
1457 1457 for drev in drevs:
1458 1458 prefetched[drev[b'phid']] = drev
1459 1459 prefetched[int(drev[b'id'])] = drev
1460 1460 if key not in prefetched:
1461 1461 raise error.Abort(
1462 1462 _(b'cannot get Differential Revision %r') % params
1463 1463 )
1464 1464 return prefetched[key]
1465 1465
1466 1466 def getstack(topdrevids):
1467 1467 """given a top, get a stack from the bottom, [id] -> [id]"""
1468 1468 visited = set()
1469 1469 result = []
1470 1470 queue = [{b'ids': [i]} for i in topdrevids]
1471 1471 while queue:
1472 1472 params = queue.pop()
1473 1473 drev = fetch(params)
1474 1474 if drev[b'id'] in visited:
1475 1475 continue
1476 1476 visited.add(drev[b'id'])
1477 1477 result.append(int(drev[b'id']))
1478 1478 auxiliary = drev.get(b'auxiliary', {})
1479 1479 depends = auxiliary.get(b'phabricator:depends-on', [])
1480 1480 for phid in depends:
1481 1481 queue.append({b'phids': [phid]})
1482 1482 result.reverse()
1483 1483 return smartset.baseset(result)
1484 1484
1485 1485 # Initialize prefetch cache
1486 1486 prefetched = {} # {id or phid: drev}
1487 1487
1488 1488 tree = _parse(spec)
1489 1489 drevs, ancestordrevs = _prefetchdrevs(tree)
1490 1490
1491 1491 # developer config: phabricator.batchsize
1492 1492 batchsize = ui.configint(b'phabricator', b'batchsize')
1493 1493
1494 1494 # Prefetch Differential Revisions in batch
1495 1495 tofetch = set(drevs)
1496 1496 for r in ancestordrevs:
1497 1497 tofetch.update(range(max(1, r - batchsize), r + 1))
1498 1498 if drevs:
1499 1499 fetch({b'ids': list(tofetch)})
1500 1500 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1501 1501
1502 1502 # Walk through the tree, return smartsets
1503 1503 def walk(tree):
1504 1504 op = tree[0]
1505 1505 if op == b'symbol':
1506 1506 drev = _parsedrev(tree[1])
1507 1507 if drev:
1508 1508 return smartset.baseset([drev])
1509 1509 elif tree[1] in _knownstatusnames:
1510 1510 drevs = [
1511 1511 r
1512 1512 for r in validids
1513 1513 if _getstatusname(prefetched[r]) == tree[1]
1514 1514 ]
1515 1515 return smartset.baseset(drevs)
1516 1516 else:
1517 1517 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1518 1518 elif op in {b'and_', b'add', b'sub'}:
1519 1519 assert len(tree) == 3
1520 1520 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1521 1521 elif op == b'group':
1522 1522 return walk(tree[1])
1523 1523 elif op == b'ancestors':
1524 1524 return getstack(walk(tree[1]))
1525 1525 else:
1526 1526 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1527 1527
1528 1528 return [prefetched[r] for r in walk(tree)]
1529 1529
1530 1530
1531 1531 def getdescfromdrev(drev):
1532 1532 """get description (commit message) from "Differential Revision"
1533 1533
1534 1534 This is similar to the differential.getcommitmessage API, but we only care
1535 1535 about limited fields: title, summary, test plan, and URL.
1536 1536 """
1537 1537 title = drev[b'title']
1538 1538 summary = drev[b'summary'].rstrip()
1539 1539 testplan = drev[b'testPlan'].rstrip()
1540 1540 if testplan:
1541 1541 testplan = b'Test Plan:\n%s' % testplan
1542 1542 uri = b'Differential Revision: %s' % drev[b'uri']
1543 1543 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1544 1544
1545 1545
1546 1546 def getdiffmeta(diff):
1547 1547 """get commit metadata (date, node, user, p1) from a diff object
1548 1548
1549 1549 The metadata could be "hg:meta", sent by phabsend, like:
1550 1550
1551 1551 "properties": {
1552 1552 "hg:meta": {
1553 1553 "branch": "default",
1554 1554 "date": "1499571514 25200",
1555 1555 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1556 1556 "user": "Foo Bar <foo@example.com>",
1557 1557 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1558 1558 }
1559 1559 }
1560 1560
1561 1561 Or converted from "local:commits", sent by "arc", like:
1562 1562
1563 1563 "properties": {
1564 1564 "local:commits": {
1565 1565 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1566 1566 "author": "Foo Bar",
1567 1567 "authorEmail": "foo@example.com"
1568 1568 "branch": "default",
1569 1569 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1570 1570 "local": "1000",
1571 1571 "message": "...",
1572 1572 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1573 1573 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1574 1574 "summary": "...",
1575 1575 "tag": "",
1576 1576 "time": 1499546314,
1577 1577 }
1578 1578 }
1579 1579 }
1580 1580
1581 1581 Note: metadata extracted from "local:commits" will lose time zone
1582 1582 information.
1583 1583 """
1584 1584 props = diff.get(b'properties') or {}
1585 1585 meta = props.get(b'hg:meta')
1586 1586 if not meta:
1587 1587 if props.get(b'local:commits'):
1588 1588 commit = sorted(props[b'local:commits'].values())[0]
1589 1589 meta = {}
1590 1590 if b'author' in commit and b'authorEmail' in commit:
1591 1591 meta[b'user'] = b'%s <%s>' % (
1592 1592 commit[b'author'],
1593 1593 commit[b'authorEmail'],
1594 1594 )
1595 1595 if b'time' in commit:
1596 1596 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1597 1597 if b'branch' in commit:
1598 1598 meta[b'branch'] = commit[b'branch']
1599 1599 node = commit.get(b'commit', commit.get(b'rev'))
1600 1600 if node:
1601 1601 meta[b'node'] = node
1602 1602 if len(commit.get(b'parents', ())) >= 1:
1603 1603 meta[b'parent'] = commit[b'parents'][0]
1604 1604 else:
1605 1605 meta = {}
1606 1606 if b'date' not in meta and b'dateCreated' in diff:
1607 1607 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1608 1608 if b'branch' not in meta and diff.get(b'branch'):
1609 1609 meta[b'branch'] = diff[b'branch']
1610 1610 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1611 1611 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1612 1612 return meta
1613 1613
1614 1614
1615 1615 def readpatch(ui, drevs, write):
1616 1616 """generate plain-text patch readable by 'hg import'
1617 1617
1618 write is usually ui.write. drevs is what "querydrev" returns, results of
1618 write takes a list of (DREV, bytes), where DREV is the differential number
1619 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1620 to be imported. drevs is what "querydrev" returns, results of
1619 1621 "differential.query".
1620 1622 """
1621 1623 # Prefetch hg:meta property for all diffs
1622 1624 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1623 1625 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1624 1626
1627 patches = []
1628
1625 1629 # Generate patch for each drev
1626 1630 for drev in drevs:
1627 1631 ui.note(_(b'reading D%s\n') % drev[b'id'])
1628 1632
1629 1633 diffid = max(int(v) for v in drev[b'diffs'])
1630 1634 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1631 1635 desc = getdescfromdrev(drev)
1632 1636 header = b'# HG changeset patch\n'
1633 1637
1634 1638 # Try to preserve metadata from hg:meta property. Write hg patch
1635 1639 # headers that can be read by the "import" command. See patchheadermap
1636 1640 # and extract in mercurial/patch.py for supported headers.
1637 1641 meta = getdiffmeta(diffs[b'%d' % diffid])
1638 1642 for k in _metanamemap.keys():
1639 1643 if k in meta:
1640 1644 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1641 1645
1642 1646 content = b'%s%s\n%s' % (header, desc, body)
1643 write(content)
1647 patches.append((drev[b'id'], content))
1648
1649 # Write patches to the supplied callback
1650 write(patches)
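A minimal sketch of a caller-supplied callback under the new contract; "writetofiles" is hypothetical and assumes the drev ids are bytes, as querydrev returns them::

    def writetofiles(patches):
        # the callback now receives the complete list of (drev id, patch
        # text) pairs in a single call, after all patches are generated
        for drevid, content in patches:
            with open(b'D%s.patch' % drevid, 'wb') as fp:
                fp.write(content)

    readpatch(ui, drevs, writetofiles)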
1644 1651
1645 1652
1646 1653 @vcrcommand(
1647 1654 b'phabread',
1648 1655 [(b'', b'stack', False, _(b'read dependencies'))],
1649 1656 _(b'DREVSPEC [OPTIONS]'),
1650 1657 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1651 1658 )
1652 1659 def phabread(ui, repo, spec, **opts):
1653 1660 """print patches from Phabricator suitable for importing
1654 1661
1655 1662 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1656 1663 the number ``123``. It could also have common operators like ``+``, ``-``,
1657 1664 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1658 1665 select a stack.
1659 1666
1660 1667 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1661 1668 could be used to filter patches by status. For performance reasons, they
1662 1669 only represent a subset of non-status selections and cannot be used alone.
1663 1670
1664 1671 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1665 1672 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1666 1673 stack up to D9.
1667 1674
1668 1675 If --stack is given, follow dependency information and read all patches.
1669 1676 It is equivalent to the ``:`` operator.
1670 1677 """
1671 1678 opts = pycompat.byteskwargs(opts)
1672 1679 if opts.get(b'stack'):
1673 1680 spec = b':(%s)' % spec
1674 1681 drevs = querydrev(repo.ui, spec)
1675 readpatch(repo.ui, drevs, ui.write)
1682
1683 def _write(patches):
1684 for drev, content in patches:
1685 ui.write(content)
1686
1687 readpatch(repo.ui, drevs, _write)
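A few illustrative invocations of the DREVSPEC syntax described above (the revision numbers are made up)::

    $ hg phabread D123 | hg import -          # apply a single revision
    $ hg phabread --stack D123 | hg import -  # D123 plus its dependencies
    $ hg phabread ':D9 & needsreview'         # "Needs Review" revisions up to D9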
1676 1688
1677 1689
1678 1690 @vcrcommand(
1679 1691 b'phabupdate',
1680 1692 [
1681 1693 (b'', b'accept', False, _(b'accept revisions')),
1682 1694 (b'', b'reject', False, _(b'reject revisions')),
1683 1695 (b'', b'abandon', False, _(b'abandon revisions')),
1684 1696 (b'', b'reclaim', False, _(b'reclaim revisions')),
1685 1697 (b'm', b'comment', b'', _(b'comment on the last revision')),
1686 1698 ],
1687 1699 _(b'DREVSPEC [OPTIONS]'),
1688 1700 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1689 1701 optionalrepo=True,
1690 1702 )
1691 1703 def phabupdate(ui, repo, spec, **opts):
1692 1704 """update Differential Revision in batch
1693 1705
1694 1706 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1695 1707 """
1696 1708 opts = pycompat.byteskwargs(opts)
1697 1709 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1698 1710 if len(flags) > 1:
1699 1711 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1700 1712
1701 1713 actions = []
1702 1714 for f in flags:
1703 1715 actions.append({b'type': f, b'value': True})
1704 1716
1705 1717 drevs = querydrev(ui, spec)
1706 1718 for i, drev in enumerate(drevs):
1707 1719 if i + 1 == len(drevs) and opts.get(b'comment'):
1708 1720 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1709 1721 if actions:
1710 1722 params = {
1711 1723 b'objectIdentifier': drev[b'phid'],
1712 1724 b'transactions': actions,
1713 1725 }
1714 1726 callconduit(ui, b'differential.revision.edit', params)
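For illustration, two hypothetical invocations (only one of the status flags may be given per run, and the comment is attached to the last selected revision)::

    $ hg phabupdate --accept D123
    $ hg phabupdate --abandon ':D9' -m 'superseded by a newer series'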
1715 1727
1716 1728
1717 1729 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1718 1730 def template_review(context, mapping):
1719 1731 """:phabreview: Object describing the review for this changeset.
1720 1732 Has attributes `url` and `id`.
1721 1733 """
1722 1734 ctx = context.resource(mapping, b'ctx')
1723 1735 m = _differentialrevisiondescre.search(ctx.description())
1724 1736 if m:
1725 1737 return templateutil.hybriddict(
1726 1738 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1727 1739 )
1728 1740 else:
1729 1741 tags = ctx.repo().nodetags(ctx.node())
1730 1742 for t in tags:
1731 1743 if _differentialrevisiontagre.match(t):
1732 1744 url = ctx.repo().ui.config(b'phabricator', b'url')
1733 1745 if not url.endswith(b'/'):
1734 1746 url += b'/'
1735 1747 url += t
1736 1748
1737 1749 return templateutil.hybriddict({b'url': url, b'id': t,})
1738 1750 return None
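For example, assuming the working directory parent carries Differential Revision information in its description or tags, something like this should print the review URL::

    $ hg log -r . -T '{phabreview.url}\n'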
1739 1751
1740 1752
1741 1753 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1742 1754 def template_status(context, mapping):
1743 1755 """:phabstatus: String. Status of Phabricator differential.
1744 1756 """
1745 1757 ctx = context.resource(mapping, b'ctx')
1746 1758 repo = context.resource(mapping, b'repo')
1747 1759 ui = context.resource(mapping, b'ui')
1748 1760
1749 1761 rev = ctx.rev()
1750 1762 try:
1751 1763 drevid = getdrevmap(repo, [rev])[rev]
1752 1764 except KeyError:
1753 1765 return None
1754 1766 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1755 1767 for drev in drevs:
1756 1768 if int(drev[b'id']) == drevid:
1757 1769 return templateutil.hybriddict(
1758 1770 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1759 1771 )
1760 1772 return None
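A hedged usage sketch: the keyword exposes "url" and "status" entries, so dotted access analogous to phabreview should work, e.g.::

    $ hg log -r . -T '{phabstatus.status}\n'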
1761 1773
1762 1774
1763 1775 @show.showview(b'phabstatus', csettopic=b'work')
1764 1776 def phabstatusshowview(ui, repo, displayer):
1765 1777 """Phabricator differiential status"""
1766 1778 revs = repo.revs('sort(_underway(), topo)')
1767 1779 drevmap = getdrevmap(repo, revs)
1768 1780 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1769 1781 for rev, drevid in pycompat.iteritems(drevmap):
1770 1782 if drevid is not None:
1771 1783 drevids.add(drevid)
1772 1784 revsbydrevid.setdefault(drevid, set([])).add(rev)
1773 1785 else:
1774 1786 unknownrevs.append(rev)
1775 1787
1776 1788 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1777 1789 drevsbyrev = {}
1778 1790 for drev in drevs:
1779 1791 for rev in revsbydrevid[int(drev[b'id'])]:
1780 1792 drevsbyrev[rev] = drev
1781 1793
1782 1794 def phabstatus(ctx):
1783 1795 drev = drevsbyrev[ctx.rev()]
1784 1796 status = ui.label(
1785 1797 b'%(statusName)s' % drev,
1786 1798 b'phabricator.status.%s' % _getstatusname(drev),
1787 1799 )
1788 1800 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1789 1801
1790 1802 revs -= smartset.baseset(unknownrevs)
1791 1803 revdag = graphmod.dagwalker(repo, revs)
1792 1804
1793 1805 ui.setconfig(b'experimental', b'graphshorten', True)
1794 1806 displayer._exthook = phabstatus
1795 1807 nodelen = show.longestshortest(repo, revs)
1796 1808 logcmdutil.displaygraph(
1797 1809 ui,
1798 1810 repo,
1799 1811 revdag,
1800 1812 displayer,
1801 1813 graphmod.asciiedges,
1802 1814 props={b'nodelen': nodelen},
1803 1815 )
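The view is registered under the name "phabstatus", so with the show extension also enabled it is reached as::

    $ hg show phabstatus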