##// END OF EJS Templates
phabricator: pass old `fctx` to `addoldbinary()` instead of inferring it...
Matt Harbison -
r44911:98f7b9cf default
parent child Browse files
Show More
@@ -1,1816 +1,1818 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Registration tables re-exported under the names Mercurial's extension
# loader expects to find at module level.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used by the phabsend/phabstatus output
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Hidden flag appended to every command registered through vcrcommand(),
# used by the test suite to record/replay HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155 155
156 156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Wraps ``localrepo.loadhgrc``.  If an ``.arcconfig`` JSON file exists in
    the working directory, its ``repository.callsign`` and
    ``phabricator.uri`` entries are applied to ``ui`` before the regular
    ``.hg/hgrc`` is loaded.  Returns True if either source provided config.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory: nothing to do
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
193 193
194 194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, adding the hidden ``--test-vcr``
    flag which records/replays HTTP traffic via the ``vcr`` library.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: compares request bodies field by
        # field so JSON payloads match regardless of key ordering.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token before it is written to the cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies may carry session secrets; drop them from recordings
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's dynamic patching is incompatible with Mercurial's
                # demand importer, so disable it while vcr is loaded/used
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped command's identity for help/dispatch
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271 271
272 272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # booleans become PHP-style string literals
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # exact type checks (not isinstance) to mirror PHP's behaviour for
        # subclasses
        if type(value) is list:
            children = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, child in children:
            subkey = b'%s[%s]' % (prefix, key) if prefix else key
            flatten(subkey, child)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
298 298
299 299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    match_result = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if match_result:
        groupname, auth = match_result
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328 328
329 329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method (e.g. ``differential.querydiffs``); the
    conduit token from the [auth] config is injected automatically.
    Aborts on a conduit-level error response.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # copy so the caller's dict is not mutated by the token injection
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # shell out to the user-configured curl command, feeding the
        # urlencoded payload on stdin
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # use Mercurial's builtin opener so [auth] credentials apply
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373 373
374 374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # look the repository up on the server by its callsign
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if len(matches) == 0:
        return None

    repophid = matches[0][b'phid']
    # cache the answer in config for the remainder of this process
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
419 419
420 420
# local "D123"-style tag created by phabsend for a submitted changeset
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# "Differential Revision: <url>" trailer line in a commit message
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425 425
426 426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # getdiffmeta records the hex node a diff was generated from
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # remove the stale local tag by re-tagging it to nullid
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520 520
521 521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def lookup(ctx):
        # Prefer the "Differential Revision:" line in the commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        # Fall back to a local "D123" tag on the node
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: lookup(repo[rev]) for rev in revs}
543 543
544 544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # concatenate only the diff text, discarding the ui labels
    return b''.join(chunk for chunk, _label in chunks)
553 553
554 554
class DiffChangeType(object):
    """Change kinds accepted by the differential.creatediff Conduit API."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564 564
565 565
class DiffFileType(object):
    """File kinds accepted by the differential.creatediff Conduit API."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570 570
571 571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names mirror the (camelCase) field names the Conduit API
    expects, hence the ``camelcase-required`` markers.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585 585
586 586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:*`` metadata entry under an ``old:*`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode string (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode string."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a ``phabhunk`` and accumulate its add/del line counts."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626 626
627 627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a ``phabchange``, keyed by the file's current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654 654
655 655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # huge context value so the whole file is available in the hunks
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # drop the "@@ ... @@" line; Phab wants only the hunk body
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684 684
685 685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the PHID returned by a prior ``file.allocate`` call.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # skip chunks the server already has (resumed upload)
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711 711
712 712
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the file object on the server.  Small files are
    sent in one ``file.upload`` call; larger ones go through
    ``uploadchunks()``.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID yet means a single-shot upload is expected
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748 748
749 749
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    # SOURCE had the pre- and post-change versions of this function
    # interleaved (diff residue); this is the resolved post-change version
    # that takes the old fctx explicitly instead of inferring fctx.p1().
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
771 773
772 774
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            # images can be rendered inline by the Phab web UI
            pchange.fileType = DiffFileType.IMAGE
785 787
786 788
# Copied from mercurial/patch.py
# Maps Mercurial file flags (symlink/exec/regular) to git mode strings
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789 791
790 792
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Both the new content and, when a parent exists, the previous
        # content must decode cleanly for the file to count as text.
        contents = [fctx.data()]
        if fctx.parents():
            contents.append(fctx.p1().data())
        for data in contents:
            data.decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
806 808
807 809
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        # the content being removed lives in the parent context
        fctx = ctx.p1()[fname]
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820 822
821 823
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    For each modified file a CHANGE-type phabchange is built; binary (or
    non-UTF-8) files are uploaded and the previous version is attached via
    ``addoldbinary()``, text files get inline hunks via ``maketext()``.
    """
    # SOURCE carried diff residue here: the addoldbinary() call existed in
    # both its old 2-argument and new 3-argument forms.  Resolved to the
    # new form, passing the parent fctx explicitly.
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
840 842
841 843
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed``: files recorded as moves are taken out of it so a
    later ``addremoved()`` call does not emit them again.
    """
    # SOURCE carried diff residue here: the addoldbinary() call existed in
    # both its old 2-argument and new 3-argument forms.  Resolved to the
    # new form, passing the copy/rename source fctx explicitly.
    #
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # source already moved somewhere else: multi-destination move
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                # attach the copy/rename source as the old binary version
                addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # away-side records go in after all adds have been processed
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
905 907
906 908
def creatediff(ctx):
    """create a Differential Diff

    Builds a ``phabdiff`` from the changes between ``ctx`` and its first
    parent and submits it via the ``differential.creatediff`` Conduit call.
    Returns the server's diff object; aborts on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
932 934
933 935
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches the ``hg:meta`` and ``local:commits`` properties to the given
    diff via two "differential.setdiffproperty" conduit calls.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }

    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }

    # one conduit call per property, hg:meta first then local:commits
    for name, data in [(b'hg:meta', hgmeta), (b'local:commits', localcommits)]:
        params = {
            b'diff_id': diffid,
            b'name': name,
            b'data': templatefilters.json(data),
        }
        callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
970 972
971 973
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` tuple: the conduit response of
    "differential.revision.edit" and the diff that was uploaded or reused.
    """
    repo = ctx.repo()
    if oldnode:
        # compare patch content of the old and new node to decide whether a
        # fresh diff upload is actually needed
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1041 1043
1042 1044
def userphids(ui, names):
    """convert user names to PHIDs

    Aborts when any requested name is unknown to the server, because the
    conduit API silently omits unknown usernames instead of erroring.
    """
    wanted = [n.lower() for n in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    # username not found is not an error of the API, so detect missing names
    # by comparing the response with what was asked for
    entries = result[b'data']
    found = {e[b'fields'][b'username'].lower() for e in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [e[b'phid'] for e in entries]
1058 1060
1059 1061
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line or phabsend.confirm in config triggers
    # an interactive confirmation before anything is sent
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Resolve --reviewer/--blocker names to PHIDs once, shared by every
    # revision in the stack
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per revision: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # remap parents through already-rewritten ancestors so
                    # the amended stack stays connected
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1271 1273
1272 1274
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # the trailing space in b'Parent ' is intentional; readpatch emits
        # these values verbatim as "# <header> <value>" patch lines
        (b'parent', b'Parent '),
    ]
)
1284 1286
1285 1287
def _confirmbeforesend(repo, revs, oldmap):
    """list the revisions about to be sent and prompt for confirmation

    Returns True when the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        _oldnode, _olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for "Yes"; anything else means declined
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1313 1315
1314 1316
# Status names usable as symbols in a DREVSPEC query (see querydrev's walk).
# Values match the normalized form produced by _getstatusname(): lowercase,
# spaces removed.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1323 1325
1324 1326
1325 1327 def _getstatusname(drev):
1326 1328 """get normalized status name from a Differential Revision"""
1327 1329 return drev[b'statusName'].replace(b' ', b'').lower()
1328 1330
1329 1331
# Small language to specify differential revisions. Support symbols: (), :X,
# +, -, and &.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1344 1346
1345 1347
def _tokenize(text):
    """yield (token-type, value, offset) triples for a DREVSPEC string

    Token types are keys of _elements: b'symbol' (with the symbol text as
    value), the single special characters, and a trailing b'end' marker.
    Spaces are skipped.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # consume the longest run of non-special bytes as a single symbol
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1365 1367
1366 1368
def _parse(text):
    """parse a DREVSPEC string into a tree, aborting on trailing garbage"""
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos == len(text):
        return tree
    # unconsumed input means the spec was malformed
    raise error.ParseError(b'invalid token', pos)
1372 1374
1373 1375
1374 1376 def _parsedrev(symbol):
1375 1377 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1376 1378 if symbol.startswith(b'D') and symbol[1:].isdigit():
1377 1379 return int(symbol[1:])
1378 1380 if symbol.isdigit():
1379 1381 return int(symbol)
1380 1382
1381 1383
1382 1384 def _prefetchdrevs(tree):
1383 1385 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1384 1386 drevs = set()
1385 1387 ancestordrevs = set()
1386 1388 op = tree[0]
1387 1389 if op == b'symbol':
1388 1390 r = _parsedrev(tree[1])
1389 1391 if r:
1390 1392 drevs.add(r)
1391 1393 elif op == b'ancestors':
1392 1394 r, a = _prefetchdrevs(tree[1])
1393 1395 drevs.update(r)
1394 1396 ancestordrevs.update(r)
1395 1397 ancestordrevs.update(a)
1396 1398 else:
1397 1399 for t in tree[1:]:
1398 1400 r, a = _prefetchdrevs(t)
1399 1401 drevs.update(r)
1400 1402 ancestordrevs.update(a)
1401 1403 return drevs, ancestordrevs
1402 1404
1403 1405
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # serve from the prefetch cache when possible; keys are ints or PHIDs
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # breadth-ish walk over "phabricator:depends-on" links, depth-first
        # via list.pop(); de-duplicated with the visited set
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestors query,
    # speculatively fetch up to batchsize lower-numbered revisions too
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status names select from the prefetched valid ids only
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1529 1531
1530 1532
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # empty sections are dropped entirely
    return b'\n\n'.join(p for p in parts if p)
1544 1546
1545 1547
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    Prefers the ``hg:meta`` property written by phabsend; otherwise it
    synthesizes equivalent fields from the ``local:commits`` property
    written by "arc" (which loses time zone information).  Fields still
    missing afterwards are backfilled from top-level diff attributes
    (``dateCreated``, ``branch``, ``sourceControlBaseRevision``).

    The "hg:meta" shape (sent by phabsend):

    "properties": {
        "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
        }
    }

    The "local:commits" shape (sent by "arc") maps a node hash to a dict
    with "author", "authorEmail", "branch", "commit"/"rev", "parents",
    and "time" keys, among others.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # zone information is not available; assume UTC offset 0
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # fall back to top-level diff fields for anything still missing
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1613 1615
1614 1616
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential
    number (as bytes, without the "D" prefix) and the bytes are the text of
    a patch to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Determine the latest diff of every revision, then prefetch their
    # hg:meta properties in a single conduit round-trip.
    latestdiff = {
        drev[b'id']: max(int(v) for v in drev[b'diffs']) for drev in drevs
    }
    diffids = sorted(set(latestdiff.values()))
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = latestdiff[drev[b'id']]
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Emit hg patch headers that the "import" command understands (see
        # patchheadermap and extract in mercurial/patch.py), preserving
        # metadata from the hg:meta property where available.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        header = b'# HG changeset patch\n'
        for key in _metanamemap.keys():
            if key in meta:
                header += b'# %s %s\n' % (_metanamemap[key], meta[key])

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Hand all generated patches to the supplied callback at once
    write(patches)
1651 1653
1652 1654
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ':' (ancestors) operator
        spec = b':(%s)' % spec
    drevs = querydrev(ui, spec)

    def emit(patches):
        # stream every generated patch straight to the ui
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, emit)
1689 1691
1690 1692
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = querydrev(ui, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # the comment, if any, is only attached to the last revision
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1728 1730
1729 1731
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group('url'), b'id': b"D%s" % match.group('id'),}
        )
    # No URL in the description: fall back to the local D<N> tag joined onto
    # the configured Phabricator base URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(t):
            base = ctx.repo().ui.config(b'phabricator', b'url')
            if not base.endswith(b'/'):
                base += b'/'
            return templateutil.hybriddict({b'url': base + t, b'id': t,})
    return None
1752 1754
1753 1755
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # no Differential Revision associated with this changeset
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    match = next((d for d in drevs if int(d[b'id']) == drevid), None)
    if match is None:
        return None
    return templateutil.hybriddict(
        {b'url': match[b'uri'], b'status': match[b'statusName'],}
    )
1774 1776
1775 1777
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # split revisions into those with an associated Differential Revision
    # (grouped by drev id) and those without one
    unknownrevs, drevids, revsbydrevid = [], set([]), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set([])).add(rev)
        else:
            unknownrevs.append(rev)

    # fetch all statuses in one conduit call, then index them by local rev
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # per-changeset hook: print the drev URI and its colored status
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # only graph revisions that have a Differential Revision
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now