phabricator: don't infer the old `fctx` in `notutf8()`...
Matt Harbison
r44913:66a05dbb default
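This change stops ``notutf8()`` from inferring and decoding the old file context (``fctx.p1()``) itself. Instead, as the hunks below show, ``addmodified()`` and ``addadded()`` resolve the old ``fctx`` explicitly, check it with a separate ``notutf8()`` call, and ``addadded()`` passes that ``oldfctx`` on to ``addoldbinary()``. Condensed from the hunks below, the shape of the change is:

    # before: notutf8() also decoded the parent's data on its own
    fctx.data().decode('utf-8')
    if fctx.parents():
        fctx.p1().data().decode('utf-8')

    # after: callers resolve the old fctx and check it explicitly
    oldfctx = fctx.p1()                              # in addmodified()
    if fctx.isbinary() or notutf8(fctx) or notutf8(oldfctx):
        makebinary(pchange, fctx)
    # addadded() guards the extra check with: oldfctx and notutf8(oldfctx)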
@@ -1,1818 +1,1819 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config on the server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
93 93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 95 # be specifying the version(s) of Mercurial they are tested with, or
96 96 # leave the attribute unspecified.
97 97 testedwith = b'ships-with-hg-core'
98 98
99 99 eh = exthelper.exthelper()
100 100
101 101 cmdtable = eh.cmdtable
102 102 command = eh.command
103 103 configtable = eh.configtable
104 104 templatekeyword = eh.templatekeyword
105 105 uisetup = eh.finaluisetup
106 106
107 107 # developer config: phabricator.batchsize
108 108 eh.configitem(
109 109 b'phabricator', b'batchsize', default=12,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'callsign', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabricator', b'curlcmd', default=None,
116 116 )
117 117 # developer config: phabricator.repophid
118 118 eh.configitem(
119 119 b'phabricator', b'repophid', default=None,
120 120 )
121 121 eh.configitem(
122 122 b'phabricator', b'url', default=None,
123 123 )
124 124 eh.configitem(
125 125 b'phabsend', b'confirm', default=False,
126 126 )
127 127
128 128 colortable = {
129 129 b'phabricator.action.created': b'green',
130 130 b'phabricator.action.skipped': b'magenta',
131 131 b'phabricator.action.updated': b'magenta',
132 132 b'phabricator.desc': b'',
133 133 b'phabricator.drev': b'bold',
134 134 b'phabricator.node': b'',
135 135 b'phabricator.status.abandoned': b'magenta dim',
136 136 b'phabricator.status.accepted': b'green bold',
137 137 b'phabricator.status.closed': b'green',
138 138 b'phabricator.status.needsreview': b'yellow',
139 139 b'phabricator.status.needsrevision': b'red',
140 140 b'phabricator.status.changesplanned': b'red',
141 141 }
142 142
143 143 _VCR_FLAGS = [
144 144 (
145 145 b'',
146 146 b'test-vcr',
147 147 b'',
148 148 _(
149 149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 150 b', otherwise will mock all http requests using the specified vcr file.'
151 151 b' (ADVANCED)'
152 152 ),
153 153 ),
154 154 ]
155 155
156 156
157 157 @eh.wrapfunction(localrepo, "loadhgrc")
158 158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 160 """
161 161 result = False
162 162 arcconfig = {}
163 163
164 164 try:
165 165 # json.loads only accepts bytes from 3.6+
166 166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 167 # json.loads only returns unicode strings
168 168 arcconfig = pycompat.rapply(
169 169 lambda x: encoding.unitolocal(x)
170 170 if isinstance(x, pycompat.unicode)
171 171 else x,
172 172 pycompat.json_loads(rawparams),
173 173 )
174 174
175 175 result = True
176 176 except ValueError:
177 177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 178 except IOError:
179 179 pass
180 180
181 181 cfg = util.sortdict()
182 182
183 183 if b"repository.callsign" in arcconfig:
184 184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
185 185
186 186 if b"phabricator.uri" in arcconfig:
187 187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188 188
189 189 if cfg:
190 190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
191 191
192 192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
193 193
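As noted above, this wrapper reads a JSON ``.arcconfig`` from the working directory and maps ``repository.callsign`` and ``phabricator.uri`` onto the ``phabricator.callsign`` and ``phabricator.url`` config items. A minimal ``.arcconfig`` (values mirror the placeholders from the module docstring) would look like:

    {
        "phabricator.uri": "https://phab.example.com/",
        "repository.callsign": "FOO"
    }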
194 194
195 195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
196 196 fullflags = flags + _VCR_FLAGS
197 197
198 198 def hgmatcher(r1, r2):
199 199 if r1.uri != r2.uri or r1.method != r2.method:
200 200 return False
201 201 r1params = util.urlreq.parseqs(r1.body)
202 202 r2params = util.urlreq.parseqs(r2.body)
203 203 for key in r1params:
204 204 if key not in r2params:
205 205 return False
206 206 value = r1params[key][0]
207 207 # we want to compare json payloads without worrying about ordering
208 208 if value.startswith(b'{') and value.endswith(b'}'):
209 209 r1json = pycompat.json_loads(value)
210 210 r2json = pycompat.json_loads(r2params[key][0])
211 211 if r1json != r2json:
212 212 return False
213 213 elif r2params[key][0] != value:
214 214 return False
215 215 return True
216 216
217 217 def sanitiserequest(request):
218 218 request.body = re.sub(
219 219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
220 220 )
221 221 return request
222 222
223 223 def sanitiseresponse(response):
224 224 if 'set-cookie' in response['headers']:
225 225 del response['headers']['set-cookie']
226 226 return response
227 227
228 228 def decorate(fn):
229 229 def inner(*args, **kwargs):
230 230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
231 231 if cassette:
232 232 import hgdemandimport
233 233
234 234 with hgdemandimport.deactivated():
235 235 import vcr as vcrmod
236 236 import vcr.stubs as stubs
237 237
238 238 vcr = vcrmod.VCR(
239 239 serializer='json',
240 240 before_record_request=sanitiserequest,
241 241 before_record_response=sanitiseresponse,
242 242 custom_patches=[
243 243 (
244 244 urlmod,
245 245 'httpconnection',
246 246 stubs.VCRHTTPConnection,
247 247 ),
248 248 (
249 249 urlmod,
250 250 'httpsconnection',
251 251 stubs.VCRHTTPSConnection,
252 252 ),
253 253 ],
254 254 )
255 255 vcr.register_matcher('hgmatcher', hgmatcher)
256 256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
257 257 return fn(*args, **kwargs)
258 258 return fn(*args, **kwargs)
259 259
260 260 inner.__name__ = fn.__name__
261 261 inner.__doc__ = fn.__doc__
262 262 return command(
263 263 name,
264 264 fullflags,
265 265 spec,
266 266 helpcategory=helpcategory,
267 267 optionalrepo=optionalrepo,
268 268 )(inner)
269 269
270 270 return decorate
271 271
272 272
273 273 def urlencodenested(params):
274 274 """like urlencode, but works with nested parameters.
275 275
276 276 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
277 277 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
278 278 urlencode. Note: the encoding is consistent with PHP's http_build_query.
279 279 """
280 280 flatparams = util.sortdict()
281 281
282 282 def process(prefix, obj):
283 283 if isinstance(obj, bool):
284 284 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
285 285 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
286 286 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
287 287 if items is None:
288 288 flatparams[prefix] = obj
289 289 else:
290 290 for k, v in items(obj):
291 291 if prefix:
292 292 process(b'%s[%s]' % (prefix, k), v)
293 293 else:
294 294 process(k, v)
295 295
296 296 process(b'', params)
297 297 return util.urlreq.urlencode(flatparams)
298 298
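The flattening described in the docstring can be sanity-checked with a quick sketch (run where this module and Mercurial are importable; the encoded output is approximate):

    params = {b'a': [b'b', b'c'], b'd': {b'e': b'f'}}
    # flattened to a[0]=b, a[1]=c, d[e]=f before urlencoding
    print(urlencodenested(params))
    # roughly: a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f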
299 299
300 300 def readurltoken(ui):
301 301 """return conduit url, token and make sure they exist
302 302
303 303 Currently read from [auth] config section. In the future, it might
304 304 make sense to read from .arcconfig and .arcrc as well.
305 305 """
306 306 url = ui.config(b'phabricator', b'url')
307 307 if not url:
308 308 raise error.Abort(
309 309 _(b'config %s.%s is required') % (b'phabricator', b'url')
310 310 )
311 311
312 312 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
313 313 token = None
314 314
315 315 if res:
316 316 group, auth = res
317 317
318 318 ui.debug(b"using auth.%s.* for authentication\n" % group)
319 319
320 320 token = auth.get(b'phabtoken')
321 321
322 322 if not token:
323 323 raise error.Abort(
324 324 _(b'Can\'t find conduit token associated to %s') % (url,)
325 325 )
326 326
327 327 return url, token
328 328
329 329
330 330 def callconduit(ui, name, params):
331 331 """call Conduit API, params is a dict. return json.loads result, or None"""
332 332 host, token = readurltoken(ui)
333 333 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
334 334 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
335 335 params = params.copy()
336 336 params[b'__conduit__'] = {
337 337 b'token': token,
338 338 }
339 339 rawdata = {
340 340 b'params': templatefilters.json(params),
341 341 b'output': b'json',
342 342 b'__conduit__': 1,
343 343 }
344 344 data = urlencodenested(rawdata)
345 345 curlcmd = ui.config(b'phabricator', b'curlcmd')
346 346 if curlcmd:
347 347 sin, sout = procutil.popen2(
348 348 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
349 349 )
350 350 sin.write(data)
351 351 sin.close()
352 352 body = sout.read()
353 353 else:
354 354 urlopener = urlmod.opener(ui, authinfo)
355 355 request = util.urlreq.request(pycompat.strurl(url), data=data)
356 356 with contextlib.closing(urlopener.open(request)) as rsp:
357 357 body = rsp.read()
358 358 ui.debug(b'Conduit Response: %s\n' % body)
359 359 parsed = pycompat.rapply(
360 360 lambda x: encoding.unitolocal(x)
361 361 if isinstance(x, pycompat.unicode)
362 362 else x,
363 363 # json.loads only accepts bytes from py3.6+
364 364 pycompat.json_loads(encoding.unifromlocal(body)),
365 365 )
366 366 if parsed.get(b'error_code'):
367 367 msg = _(b'Conduit Error (%s): %s') % (
368 368 parsed[b'error_code'],
369 369 parsed[b'error_info'],
370 370 )
371 371 raise error.Abort(msg)
372 372 return parsed[b'result']
373 373
374 374
375 375 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
376 376 def debugcallconduit(ui, repo, name):
377 377 """call Conduit API
378 378
379 379 Call parameters are read from stdin as a JSON blob. Result will be written
380 380 to stdout as a JSON blob.
381 381 """
382 382 # json.loads only accepts bytes from 3.6+
383 383 rawparams = encoding.unifromlocal(ui.fin.read())
384 384 # json.loads only returns unicode strings
385 385 params = pycompat.rapply(
386 386 lambda x: encoding.unitolocal(x)
387 387 if isinstance(x, pycompat.unicode)
388 388 else x,
389 389 pycompat.json_loads(rawparams),
390 390 )
391 391 # json.dumps only accepts unicode strings
392 392 result = pycompat.rapply(
393 393 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
394 394 callconduit(ui, name, params),
395 395 )
396 396 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
397 397 ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
400 400 def getrepophid(repo):
401 401 """given callsign, return repository PHID or None"""
402 402 # developer config: phabricator.repophid
403 403 repophid = repo.ui.config(b'phabricator', b'repophid')
404 404 if repophid:
405 405 return repophid
406 406 callsign = repo.ui.config(b'phabricator', b'callsign')
407 407 if not callsign:
408 408 return None
409 409 query = callconduit(
410 410 repo.ui,
411 411 b'diffusion.repository.search',
412 412 {b'constraints': {b'callsigns': [callsign]}},
413 413 )
414 414 if len(query[b'data']) == 0:
415 415 return None
416 416 repophid = query[b'data'][0][b'phid']
417 417 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
418 418 return repophid
419 419
420 420
421 421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
422 422 _differentialrevisiondescre = re.compile(
423 423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
424 424 )
425 425
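A quick standalone sketch of what these two patterns accept (the patterns are copied here for illustration; the URL form matches the ``Differential Revision:`` line that ``getdescfromdrev()`` builds further down):

    import re

    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
    )
    m = descre.search(b'...\n\nDifferential Revision: https://phab.example.com/D2\n')
    # m.group('url') == b'https://phab.example.com/D2', m.group('id') == b'2'

    tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
    # tagre.match(b'D1234') succeeds; b'D0' and b'1234' do not match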
426 426
427 427 def getoldnodedrevmap(repo, nodelist):
428 428 """find previous nodes that has been sent to Phabricator
429 429
430 430 return {node: (oldnode, Differential diff, Differential Revision ID)}
431 431 for node in nodelist with known previously sent versions, or associated
432 432 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
433 433 be ``None``.
434 434
435 435 Examines commit messages like "Differential Revision:" to get the
436 436 association information.
437 437
438 438 If such a commit message line is not found, examine all precursors and their
439 439 tags. Tags with a format like "D1234" are considered a match, and the node
440 440 with that tag, along with the number after "D" (e.g. 1234), will be returned.
441 441
442 442 The ``old node``, if not None, is guaranteed to be the last diff of the
443 443 corresponding Differential Revision, and to exist in the repo.
444 444 """
445 445 unfi = repo.unfiltered()
446 446 has_node = unfi.changelog.index.has_node
447 447
448 448 result = {} # {node: (oldnode?, lastdiff?, drev)}
449 449 toconfirm = {} # {node: (force, {precnode}, drev)}
450 450 for node in nodelist:
451 451 ctx = unfi[node]
452 452 # For tags like "D123", put them into "toconfirm" to verify later
453 453 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
454 454 for n in precnodes:
455 455 if has_node(n):
456 456 for tag in unfi.nodetags(n):
457 457 m = _differentialrevisiontagre.match(tag)
458 458 if m:
459 459 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
460 460 break
461 461 else:
462 462 continue # move to next predecessor
463 463 break # found a tag, stop
464 464 else:
465 465 # Check commit message
466 466 m = _differentialrevisiondescre.search(ctx.description())
467 467 if m:
468 468 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
469 469
470 470 # Double-check that tags are genuine by collecting all old nodes from
471 471 # Phabricator, and expecting the local precursors to overlap with them.
472 472 if toconfirm:
473 473 drevs = [drev for force, precs, drev in toconfirm.values()]
474 474 alldiffs = callconduit(
475 475 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
476 476 )
477 477 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
478 478 for newnode, (force, precset, drev) in toconfirm.items():
479 479 diffs = [
480 480 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
481 481 ]
482 482
483 483 # "precursors" as known by Phabricator
484 484 phprecset = set(getnode(d) for d in diffs)
485 485
486 486 # Ignore if precursors (Phabricator and local repo) do not overlap,
487 487 # and force is not set (when commit message says nothing)
488 488 if not force and not bool(phprecset & precset):
489 489 tagname = b'D%d' % drev
490 490 tags.tag(
491 491 repo,
492 492 tagname,
493 493 nullid,
494 494 message=None,
495 495 user=None,
496 496 date=None,
497 497 local=True,
498 498 )
499 499 unfi.ui.warn(
500 500 _(
501 501 b'D%d: local tag removed - does not match '
502 502 b'Differential history\n'
503 503 )
504 504 % drev
505 505 )
506 506 continue
507 507
508 508 # Find the last node using Phabricator metadata, and make sure it
509 509 # exists in the repo
510 510 oldnode = lastdiff = None
511 511 if diffs:
512 512 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
513 513 oldnode = getnode(lastdiff)
514 514 if oldnode and not has_node(oldnode):
515 515 oldnode = None
516 516
517 517 result[newnode] = (oldnode, lastdiff, drev)
518 518
519 519 return result
520 520
521 521
522 522 def getdrevmap(repo, revs):
523 523 """Return a dict mapping each rev in `revs` to their Differential Revision
524 524 ID or None.
525 525 """
526 526 result = {}
527 527 for rev in revs:
528 528 result[rev] = None
529 529 ctx = repo[rev]
530 530 # Check commit message
531 531 m = _differentialrevisiondescre.search(ctx.description())
532 532 if m:
533 533 result[rev] = int(m.group('id'))
534 534 continue
535 535 # Check tags
536 536 for tag in repo.nodetags(ctx.node()):
537 537 m = _differentialrevisiontagre.match(tag)
538 538 if m:
539 539 result[rev] = int(m.group(1))
540 540 break
541 541
542 542 return result
543 543
544 544
545 545 def getdiff(ctx, diffopts):
546 546 """plain-text diff without header (user, commit message, etc)"""
547 547 output = util.stringio()
548 548 for chunk, _label in patch.diffui(
549 549 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
550 550 ):
551 551 output.write(chunk)
552 552 return output.getvalue()
553 553
554 554
555 555 class DiffChangeType(object):
556 556 ADD = 1
557 557 CHANGE = 2
558 558 DELETE = 3
559 559 MOVE_AWAY = 4
560 560 COPY_AWAY = 5
561 561 MOVE_HERE = 6
562 562 COPY_HERE = 7
563 563 MULTICOPY = 8
564 564
565 565
566 566 class DiffFileType(object):
567 567 TEXT = 1
568 568 IMAGE = 2
569 569 BINARY = 3
570 570
571 571
572 572 @attr.s
573 573 class phabhunk(dict):
574 574 """Represents a Differential hunk, which is owned by a Differential change
575 575 """
576 576
577 577 oldOffset = attr.ib(default=0) # camelcase-required
578 578 oldLength = attr.ib(default=0) # camelcase-required
579 579 newOffset = attr.ib(default=0) # camelcase-required
580 580 newLength = attr.ib(default=0) # camelcase-required
581 581 corpus = attr.ib(default='')
582 582 # These get added to the phabchange's equivalents
583 583 addLines = attr.ib(default=0) # camelcase-required
584 584 delLines = attr.ib(default=0) # camelcase-required
585 585
586 586
587 587 @attr.s
588 588 class phabchange(object):
589 589 """Represents a Differential change, owns Differential hunks and owned by a
590 590 Differential diff. Each one represents one file in a diff.
591 591 """
592 592
593 593 currentPath = attr.ib(default=None) # camelcase-required
594 594 oldPath = attr.ib(default=None) # camelcase-required
595 595 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
596 596 metadata = attr.ib(default=attr.Factory(dict))
597 597 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
598 598 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
599 599 type = attr.ib(default=DiffChangeType.CHANGE)
600 600 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
601 601 commitHash = attr.ib(default=None) # camelcase-required
602 602 addLines = attr.ib(default=0) # camelcase-required
603 603 delLines = attr.ib(default=0) # camelcase-required
604 604 hunks = attr.ib(default=attr.Factory(list))
605 605
606 606 def copynewmetadatatoold(self):
607 607 for key in list(self.metadata.keys()):
608 608 newkey = key.replace(b'new:', b'old:')
609 609 self.metadata[newkey] = self.metadata[key]
610 610
611 611 def addoldmode(self, value):
612 612 self.oldProperties[b'unix:filemode'] = value
613 613
614 614 def addnewmode(self, value):
615 615 self.newProperties[b'unix:filemode'] = value
616 616
617 617 def addhunk(self, hunk):
618 618 if not isinstance(hunk, phabhunk):
619 619 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
620 620 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
621 621 # It's useful to include these stats since the Phab web UI shows them,
622 622 # and uses them to estimate how large a change a Revision is. Also used
623 623 # in email subjects for the [+++--] bit.
624 624 self.addLines += hunk.addLines
625 625 self.delLines += hunk.delLines
626 626
627 627
628 628 @attr.s
629 629 class phabdiff(object):
630 630 """Represents a Differential diff, owns Differential changes. Corresponds
631 631 to a commit.
632 632 """
633 633
634 634 # Doesn't seem to be any reason to send this (output of uname -n)
635 635 sourceMachine = attr.ib(default=b'') # camelcase-required
636 636 sourcePath = attr.ib(default=b'/') # camelcase-required
637 637 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
638 638 sourceControlPath = attr.ib(default=b'/') # camelcase-required
639 639 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
640 640 branch = attr.ib(default=b'default')
641 641 bookmark = attr.ib(default=None)
642 642 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
643 643 lintStatus = attr.ib(default=b'none') # camelcase-required
644 644 unitStatus = attr.ib(default=b'none') # camelcase-required
645 645 changes = attr.ib(default=attr.Factory(dict))
646 646 repositoryPHID = attr.ib(default=None) # camelcase-required
647 647
648 648 def addchange(self, change):
649 649 if not isinstance(change, phabchange):
650 650 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
651 651 self.changes[change.currentPath] = pycompat.byteskwargs(
652 652 attr.asdict(change)
653 653 )
654 654
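A minimal sketch (hypothetical path, offsets and corpus) of how these three classes nest, which is essentially what ``maketext()`` and the ``add*`` helpers below do:

    hunk = phabhunk(
        oldOffset=1, oldLength=1, newOffset=1, newLength=2,
        corpus=b' old line\n+new line\n', addLines=1, delLines=0,
    )
    change = phabchange(currentPath=b'foo.txt', oldPath=b'foo.txt')
    change.addhunk(hunk)      # also bumps change.addLines / change.delLines
    pdiff = phabdiff()
    pdiff.addchange(change)   # stored in pdiff.changes keyed by currentPath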
655 655
656 656 def maketext(pchange, ctx, fname):
657 657 """populate the phabchange for a text file"""
658 658 repo = ctx.repo()
659 659 fmatcher = match.exact([fname])
660 660 diffopts = mdiff.diffopts(git=True, context=32767)
661 661 _pfctx, _fctx, header, fhunks = next(
662 662 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
663 663 )
664 664
665 665 for fhunk in fhunks:
666 666 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
667 667 corpus = b''.join(lines[1:])
668 668 shunk = list(header)
669 669 shunk.extend(lines)
670 670 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
671 671 patch.diffstatdata(util.iterlines(shunk))
672 672 )
673 673 pchange.addhunk(
674 674 phabhunk(
675 675 oldOffset,
676 676 oldLength,
677 677 newOffset,
678 678 newLength,
679 679 corpus,
680 680 addLines,
681 681 delLines,
682 682 )
683 683 )
684 684
685 685
686 686 def uploadchunks(fctx, fphid):
687 687 """upload large binary files as separate chunks.
688 688 Phab requests chunking over 8MiB, and splits into 4MiB chunks
689 689 """
690 690 ui = fctx.repo().ui
691 691 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
692 692 with ui.makeprogress(
693 693 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
694 694 ) as progress:
695 695 for chunk in chunks:
696 696 progress.increment()
697 697 if chunk[b'complete']:
698 698 continue
699 699 bstart = int(chunk[b'byteStart'])
700 700 bend = int(chunk[b'byteEnd'])
701 701 callconduit(
702 702 ui,
703 703 b'file.uploadchunk',
704 704 {
705 705 b'filePHID': fphid,
706 706 b'byteStart': bstart,
707 707 b'data': base64.b64encode(fctx.data()[bstart:bend]),
708 708 b'dataEncoding': b'base64',
709 709 },
710 710 )
711 711
712 712
713 713 def uploadfile(fctx):
714 714 """upload binary files to Phabricator"""
715 715 repo = fctx.repo()
716 716 ui = repo.ui
717 717 fname = fctx.path()
718 718 size = fctx.size()
719 719 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
720 720
721 721 # an allocate call is required first to see if an upload is even required
722 722 # (Phab might already have it) and to determine if chunking is needed
723 723 allocateparams = {
724 724 b'name': fname,
725 725 b'contentLength': size,
726 726 b'contentHash': fhash,
727 727 }
728 728 filealloc = callconduit(ui, b'file.allocate', allocateparams)
729 729 fphid = filealloc[b'filePHID']
730 730
731 731 if filealloc[b'upload']:
732 732 ui.write(_(b'uploading %s\n') % bytes(fctx))
733 733 if not fphid:
734 734 uploadparams = {
735 735 b'name': fname,
736 736 b'data_base64': base64.b64encode(fctx.data()),
737 737 }
738 738 fphid = callconduit(ui, b'file.upload', uploadparams)
739 739 else:
740 740 uploadchunks(fctx, fphid)
741 741 else:
742 742 ui.debug(b'server already has %s\n' % bytes(fctx))
743 743
744 744 if not fphid:
745 745 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
746 746
747 747 return fphid
748 748
749 749
750 750 def addoldbinary(pchange, oldfctx, fctx):
751 751 """add the metadata for the previous version of a binary file to the
752 752 phabchange for the new version
753 753
754 754 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
755 755 version of the file, or None if the file is being removed.
756 756 """
757 757 if not fctx or fctx.cmp(oldfctx):
758 758 # Files differ, add the old one
759 759 pchange.metadata[b'old:file:size'] = oldfctx.size()
760 760 mimeguess, _enc = mimetypes.guess_type(
761 761 encoding.unifromlocal(oldfctx.path())
762 762 )
763 763 if mimeguess:
764 764 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
765 765 mimeguess
766 766 )
767 767 fphid = uploadfile(oldfctx)
768 768 pchange.metadata[b'old:binary-phid'] = fphid
769 769 else:
770 770 # If it's left as IMAGE/BINARY web UI might try to display it
771 771 pchange.fileType = DiffFileType.TEXT
772 772 pchange.copynewmetadatatoold()
773 773
774 774
775 775 def makebinary(pchange, fctx):
776 776 """populate the phabchange for a binary file"""
777 777 pchange.fileType = DiffFileType.BINARY
778 778 fphid = uploadfile(fctx)
779 779 pchange.metadata[b'new:binary-phid'] = fphid
780 780 pchange.metadata[b'new:file:size'] = fctx.size()
781 781 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
782 782 if mimeguess:
783 783 mimeguess = pycompat.bytestr(mimeguess)
784 784 pchange.metadata[b'new:file:mime-type'] = mimeguess
785 785 if mimeguess.startswith(b'image/'):
786 786 pchange.fileType = DiffFileType.IMAGE
787 787
788 788
789 789 # Copied from mercurial/patch.py
790 790 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
791 791
792 792
793 793 def notutf8(fctx):
794 794 """detect non-UTF-8 text files since Phabricator requires them to be marked
795 795 as binary
796 796 """
797 797 try:
798 798 fctx.data().decode('utf-8')
799 if fctx.parents():
800 fctx.p1().data().decode('utf-8')
801 799 return False
802 800 except UnicodeDecodeError:
803 801 fctx.repo().ui.write(
804 802 _(b'file %s detected as non-UTF-8, marked as binary\n')
805 803 % fctx.path()
806 804 )
807 805 return True
808 806
809 807
810 808 def addremoved(pdiff, ctx, removed):
811 809 """add removed files to the phabdiff. Shouldn't include moves"""
812 810 for fname in removed:
813 811 pchange = phabchange(
814 812 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
815 813 )
816 814 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
817 815 oldfctx = ctx.p1()[fname]
818 816 if not (oldfctx.isbinary() or notutf8(oldfctx)):
819 817 maketext(pchange, ctx, fname)
820 818
821 819 pdiff.addchange(pchange)
822 820
823 821
824 822 def addmodified(pdiff, ctx, modified):
825 823 """add modified files to the phabdiff"""
826 824 for fname in modified:
827 825 fctx = ctx[fname]
826 oldfctx = fctx.p1()
828 827 pchange = phabchange(currentPath=fname, oldPath=fname)
829 828 filemode = gitmode[ctx[fname].flags()]
830 829 originalmode = gitmode[ctx.p1()[fname].flags()]
831 830 if filemode != originalmode:
832 831 pchange.addoldmode(originalmode)
833 832 pchange.addnewmode(filemode)
834 833
835 if fctx.isbinary() or notutf8(fctx):
834 if fctx.isbinary() or notutf8(fctx) or notutf8(oldfctx):
836 835 makebinary(pchange, fctx)
837 836 addoldbinary(pchange, fctx.p1(), fctx)
838 837 else:
839 838 maketext(pchange, ctx, fname)
840 839
841 840 pdiff.addchange(pchange)
842 841
843 842
844 843 def addadded(pdiff, ctx, added, removed):
845 844 """add file adds to the phabdiff, both new files and copies/moves"""
846 845 # Keep track of files that've been recorded as moved/copied, so if there are
847 846 # additional copies we can mark them (moves get removed from removed)
848 847 copiedchanges = {}
849 848 movedchanges = {}
850 849 for fname in added:
851 850 fctx = ctx[fname]
851 oldfctx = None
852 852 pchange = phabchange(currentPath=fname)
853 853
854 854 filemode = gitmode[ctx[fname].flags()]
855 855 renamed = fctx.renamed()
856 856
857 857 if renamed:
858 858 originalfname = renamed[0]
859 originalmode = gitmode[ctx.p1()[originalfname].flags()]
859 oldfctx = ctx.p1()[originalfname]
860 originalmode = gitmode[oldfctx.flags()]
860 861 pchange.oldPath = originalfname
861 862
862 863 if originalfname in removed:
863 864 origpchange = phabchange(
864 865 currentPath=originalfname,
865 866 oldPath=originalfname,
866 867 type=DiffChangeType.MOVE_AWAY,
867 868 awayPaths=[fname],
868 869 )
869 870 movedchanges[originalfname] = origpchange
870 871 removed.remove(originalfname)
871 872 pchange.type = DiffChangeType.MOVE_HERE
872 873 elif originalfname in movedchanges:
873 874 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
874 875 movedchanges[originalfname].awayPaths.append(fname)
875 876 pchange.type = DiffChangeType.COPY_HERE
876 877 else: # pure copy
877 878 if originalfname not in copiedchanges:
878 879 origpchange = phabchange(
879 880 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
880 881 )
881 882 copiedchanges[originalfname] = origpchange
882 883 else:
883 884 origpchange = copiedchanges[originalfname]
884 885 origpchange.awayPaths.append(fname)
885 886 pchange.type = DiffChangeType.COPY_HERE
886 887
887 888 if filemode != originalmode:
888 889 pchange.addoldmode(originalmode)
889 890 pchange.addnewmode(filemode)
890 891 else: # Brand-new file
891 892 pchange.addnewmode(gitmode[fctx.flags()])
892 893 pchange.type = DiffChangeType.ADD
893 894
894 if fctx.isbinary() or notutf8(fctx):
895 if fctx.isbinary() or notutf8(fctx) or (oldfctx and notutf8(oldfctx)):
895 896 makebinary(pchange, fctx)
896 897 if renamed:
897 addoldbinary(pchange, fctx.p1(), fctx)
898 addoldbinary(pchange, oldfctx, fctx)
898 899 else:
899 900 maketext(pchange, ctx, fname)
900 901
901 902 pdiff.addchange(pchange)
902 903
903 904 for _path, copiedchange in copiedchanges.items():
904 905 pdiff.addchange(copiedchange)
905 906 for _path, movedchange in movedchanges.items():
906 907 pdiff.addchange(movedchange)
907 908
908 909
909 910 def creatediff(ctx):
910 911 """create a Differential Diff"""
911 912 repo = ctx.repo()
912 913 repophid = getrepophid(repo)
913 914 # Create a "Differential Diff" via "differential.creatediff" API
914 915 pdiff = phabdiff(
915 916 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
916 917 branch=b'%s' % ctx.branch(),
917 918 )
918 919 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
919 920 # addadded will remove moved files from removed, so addremoved won't get
920 921 # them
921 922 addadded(pdiff, ctx, added, removed)
922 923 addmodified(pdiff, ctx, modified)
923 924 addremoved(pdiff, ctx, removed)
924 925 if repophid:
925 926 pdiff.repositoryPHID = repophid
926 927 diff = callconduit(
927 928 repo.ui,
928 929 b'differential.creatediff',
929 930 pycompat.byteskwargs(attr.asdict(pdiff)),
930 931 )
931 932 if not diff:
932 933 raise error.Abort(_(b'cannot create diff for %s') % ctx)
933 934 return diff
934 935
935 936
936 937 def writediffproperties(ctx, diff):
937 938 """write metadata to diff so patches could be applied losslessly"""
938 939 # creatediff returns with a diffid but query returns with an id
939 940 diffid = diff.get(b'diffid', diff.get(b'id'))
940 941 params = {
941 942 b'diff_id': diffid,
942 943 b'name': b'hg:meta',
943 944 b'data': templatefilters.json(
944 945 {
945 946 b'user': ctx.user(),
946 947 b'date': b'%d %d' % ctx.date(),
947 948 b'branch': ctx.branch(),
948 949 b'node': ctx.hex(),
949 950 b'parent': ctx.p1().hex(),
950 951 }
951 952 ),
952 953 }
953 954 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
954 955
955 956 params = {
956 957 b'diff_id': diffid,
957 958 b'name': b'local:commits',
958 959 b'data': templatefilters.json(
959 960 {
960 961 ctx.hex(): {
961 962 b'author': stringutil.person(ctx.user()),
962 963 b'authorEmail': stringutil.email(ctx.user()),
963 964 b'time': int(ctx.date()[0]),
964 965 b'commit': ctx.hex(),
965 966 b'parents': [ctx.p1().hex()],
966 967 b'branch': ctx.branch(),
967 968 },
968 969 }
969 970 ),
970 971 }
971 972 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
972 973
973 974
974 975 def createdifferentialrevision(
975 976 ctx,
976 977 revid=None,
977 978 parentrevphid=None,
978 979 oldnode=None,
979 980 olddiff=None,
980 981 actions=None,
981 982 comment=None,
982 983 ):
983 984 """create or update a Differential Revision
984 985
985 986 If revid is None, create a new Differential Revision, otherwise update
986 987 revid. If parentrevphid is not None, set it as a dependency.
987 988
988 989 If oldnode is not None, check if the patch content (without commit message
989 990 and metadata) has changed before creating another diff.
990 991
991 992 If actions is not None, they will be appended to the transaction.
992 993 """
993 994 repo = ctx.repo()
994 995 if oldnode:
995 996 diffopts = mdiff.diffopts(git=True, context=32767)
996 997 oldctx = repo.unfiltered()[oldnode]
997 998 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
998 999 else:
999 1000 neednewdiff = True
1000 1001
1001 1002 transactions = []
1002 1003 if neednewdiff:
1003 1004 diff = creatediff(ctx)
1004 1005 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1005 1006 if comment:
1006 1007 transactions.append({b'type': b'comment', b'value': comment})
1007 1008 else:
1008 1009 # Even if we don't need to upload a new diff because the patch content
1009 1010 # does not change, we might still need to update its metadata so
1010 1011 # pushers know the correct node metadata.
1011 1012 assert olddiff
1012 1013 diff = olddiff
1013 1014 writediffproperties(ctx, diff)
1014 1015
1015 1016 # Set the parent Revision every time, so commit re-ordering is picked-up
1016 1017 if parentrevphid:
1017 1018 transactions.append(
1018 1019 {b'type': b'parents.set', b'value': [parentrevphid]}
1019 1020 )
1020 1021
1021 1022 if actions:
1022 1023 transactions += actions
1023 1024
1024 1025 # Parse commit message and update related fields.
1025 1026 desc = ctx.description()
1026 1027 info = callconduit(
1027 1028 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1028 1029 )
1029 1030 for k, v in info[b'fields'].items():
1030 1031 if k in [b'title', b'summary', b'testPlan']:
1031 1032 transactions.append({b'type': k, b'value': v})
1032 1033
1033 1034 params = {b'transactions': transactions}
1034 1035 if revid is not None:
1035 1036 # Update an existing Differential Revision
1036 1037 params[b'objectIdentifier'] = revid
1037 1038
1038 1039 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1039 1040 if not revision:
1040 1041 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1041 1042
1042 1043 return revision, diff
1043 1044
1044 1045
1045 1046 def userphids(ui, names):
1046 1047 """convert user names to PHIDs"""
1047 1048 names = [name.lower() for name in names]
1048 1049 query = {b'constraints': {b'usernames': names}}
1049 1050 result = callconduit(ui, b'user.search', query)
1050 1051 # A username not being found is not an API error, so check whether we have
1051 1052 # missed some names here.
1052 1053 data = result[b'data']
1053 1054 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
1054 1055 unresolved = set(names) - resolved
1055 1056 if unresolved:
1056 1057 raise error.Abort(
1057 1058 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1058 1059 )
1059 1060 return [entry[b'phid'] for entry in data]
1060 1061
1061 1062
1062 1063 @vcrcommand(
1063 1064 b'phabsend',
1064 1065 [
1065 1066 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1066 1067 (b'', b'amend', True, _(b'update commit messages')),
1067 1068 (b'', b'reviewer', [], _(b'specify reviewers')),
1068 1069 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1069 1070 (
1070 1071 b'm',
1071 1072 b'comment',
1072 1073 b'',
1073 1074 _(b'add a comment to Revisions with new/updated Diffs'),
1074 1075 ),
1075 1076 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1076 1077 ],
1077 1078 _(b'REV [OPTIONS]'),
1078 1079 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1079 1080 )
1080 1081 def phabsend(ui, repo, *revs, **opts):
1081 1082 """upload changesets to Phabricator
1082 1083
1083 1084 If multiple revisions are specified, they will be sent as a stack
1084 1085 with a linear dependency relationship, using the order specified by the
1085 1086 revset.
1086 1087
1087 1088 When changesets are uploaded for the first time, local tags will be created
1088 1089 to maintain the association. After the first time, phabsend will check the
1089 1090 obsstore and tag information so it can figure out whether to update an
1090 1091 existing Differential Revision, or create a new one.
1091 1092
1092 1093 If --amend is set, update commit messages so they have the
1093 1094 ``Differential Revision`` URL, and remove the related tags. This is similar
1094 1095 to what arcanist does, and is preferred in author-push workflows. Otherwise,
1095 1096 local tags are used to record the ``Differential Revision`` association.
1096 1097
1097 1098 The --confirm option lets you confirm changesets before sending them. You
1098 1099 can also add the following to your configuration file to make it the default
1099 1100 behaviour::
1100 1101
1101 1102 [phabsend]
1102 1103 confirm = true
1103 1104
1104 1105 phabsend will check obsstore and the above association to decide whether to
1105 1106 update an existing Differential Revision, or create a new one.
1106 1107 """
1107 1108 opts = pycompat.byteskwargs(opts)
1108 1109 revs = list(revs) + opts.get(b'rev', [])
1109 1110 revs = scmutil.revrange(repo, revs)
1110 1111 revs.sort() # ascending order to preserve topological parent/child in phab
1111 1112
1112 1113 if not revs:
1113 1114 raise error.Abort(_(b'phabsend requires at least one changeset'))
1114 1115 if opts.get(b'amend'):
1115 1116 cmdutil.checkunfinished(repo)
1116 1117
1117 1118 # {newnode: (oldnode, olddiff, olddrev}
1118 1119 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1119 1120
1120 1121 confirm = ui.configbool(b'phabsend', b'confirm')
1121 1122 confirm |= bool(opts.get(b'confirm'))
1122 1123 if confirm:
1123 1124 confirmed = _confirmbeforesend(repo, revs, oldmap)
1124 1125 if not confirmed:
1125 1126 raise error.Abort(_(b'phabsend cancelled'))
1126 1127
1127 1128 actions = []
1128 1129 reviewers = opts.get(b'reviewer', [])
1129 1130 blockers = opts.get(b'blocker', [])
1130 1131 phids = []
1131 1132 if reviewers:
1132 1133 phids.extend(userphids(repo.ui, reviewers))
1133 1134 if blockers:
1134 1135 phids.extend(
1135 1136 map(
1136 1137 lambda phid: b'blocking(%s)' % phid,
1137 1138 userphids(repo.ui, blockers),
1138 1139 )
1139 1140 )
1140 1141 if phids:
1141 1142 actions.append({b'type': b'reviewers.add', b'value': phids})
1142 1143
1143 1144 drevids = [] # [int]
1144 1145 diffmap = {} # {newnode: diff}
1145 1146
1146 1147 # Send patches one by one so we know their Differential Revision PHIDs and
1147 1148 # can provide dependency relationship
1148 1149 lastrevphid = None
1149 1150 for rev in revs:
1150 1151 ui.debug(b'sending rev %d\n' % rev)
1151 1152 ctx = repo[rev]
1152 1153
1153 1154 # Get Differential Revision ID
1154 1155 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1155 1156 if oldnode != ctx.node() or opts.get(b'amend'):
1156 1157 # Create or update Differential Revision
1157 1158 revision, diff = createdifferentialrevision(
1158 1159 ctx,
1159 1160 revid,
1160 1161 lastrevphid,
1161 1162 oldnode,
1162 1163 olddiff,
1163 1164 actions,
1164 1165 opts.get(b'comment'),
1165 1166 )
1166 1167 diffmap[ctx.node()] = diff
1167 1168 newrevid = int(revision[b'object'][b'id'])
1168 1169 newrevphid = revision[b'object'][b'phid']
1169 1170 if revid:
1170 1171 action = b'updated'
1171 1172 else:
1172 1173 action = b'created'
1173 1174
1174 1175 # Create a local tag to note the association, if commit message
1175 1176 # does not have it already
1176 1177 m = _differentialrevisiondescre.search(ctx.description())
1177 1178 if not m or int(m.group('id')) != newrevid:
1178 1179 tagname = b'D%d' % newrevid
1179 1180 tags.tag(
1180 1181 repo,
1181 1182 tagname,
1182 1183 ctx.node(),
1183 1184 message=None,
1184 1185 user=None,
1185 1186 date=None,
1186 1187 local=True,
1187 1188 )
1188 1189 else:
1189 1190 # Nothing changed. But still set "newrevphid" so the next revision
1190 1191 # can depend on this one, and "newrevid" for the summary line.
1191 1192 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1192 1193 newrevid = revid
1193 1194 action = b'skipped'
1194 1195
1195 1196 actiondesc = ui.label(
1196 1197 {
1197 1198 b'created': _(b'created'),
1198 1199 b'skipped': _(b'skipped'),
1199 1200 b'updated': _(b'updated'),
1200 1201 }[action],
1201 1202 b'phabricator.action.%s' % action,
1202 1203 )
1203 1204 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1204 1205 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1205 1206 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1206 1207 ui.write(
1207 1208 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1208 1209 )
1209 1210 drevids.append(newrevid)
1210 1211 lastrevphid = newrevphid
1211 1212
1212 1213 # Update commit messages and remove tags
1213 1214 if opts.get(b'amend'):
1214 1215 unfi = repo.unfiltered()
1215 1216 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1216 1217 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1217 1218 wnode = unfi[b'.'].node()
1218 1219 mapping = {} # {oldnode: [newnode]}
1219 1220 for i, rev in enumerate(revs):
1220 1221 old = unfi[rev]
1221 1222 drevid = drevids[i]
1222 1223 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1223 1224 newdesc = getdescfromdrev(drev)
1224 1225 # Make sure the commit message contains "Differential Revision"
1225 1226 if old.description() != newdesc:
1226 1227 if old.phase() == phases.public:
1227 1228 ui.warn(
1228 1229 _(b"warning: not updating public commit %s\n")
1229 1230 % scmutil.formatchangeid(old)
1230 1231 )
1231 1232 continue
1232 1233 parents = [
1233 1234 mapping.get(old.p1().node(), (old.p1(),))[0],
1234 1235 mapping.get(old.p2().node(), (old.p2(),))[0],
1235 1236 ]
1236 1237 new = context.metadataonlyctx(
1237 1238 repo,
1238 1239 old,
1239 1240 parents=parents,
1240 1241 text=newdesc,
1241 1242 user=old.user(),
1242 1243 date=old.date(),
1243 1244 extra=old.extra(),
1244 1245 )
1245 1246
1246 1247 newnode = new.commit()
1247 1248
1248 1249 mapping[old.node()] = [newnode]
1249 1250 # Update diff property
1250 1251 # If it fails just warn and keep going, otherwise the DREV
1251 1252 # associations will be lost
1252 1253 try:
1253 1254 writediffproperties(unfi[newnode], diffmap[old.node()])
1254 1255 except util.urlerr.urlerror:
1255 1256 ui.warnnoi18n(
1256 1257 b'Failed to update metadata for D%d\n' % drevid
1257 1258 )
1258 1259 # Remove the local tag since it's no longer necessary
1259 1260 tagname = b'D%d' % drevid
1260 1261 if tagname in repo.tags():
1261 1262 tags.tag(
1262 1263 repo,
1263 1264 tagname,
1264 1265 nullid,
1265 1266 message=None,
1266 1267 user=None,
1267 1268 date=None,
1268 1269 local=True,
1269 1270 )
1270 1271 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1271 1272 if wnode in mapping:
1272 1273 unfi.setparents(mapping[wnode][0])
1273 1274
1274 1275
1275 1276 # Map from "hg:meta" keys to header understood by "hg import". The order is
1276 1277 # consistent with "hg export" output.
1277 1278 _metanamemap = util.sortdict(
1278 1279 [
1279 1280 (b'user', b'User'),
1280 1281 (b'date', b'Date'),
1281 1282 (b'branch', b'Branch'),
1282 1283 (b'node', b'Node ID'),
1283 1284 (b'parent', b'Parent '),
1284 1285 ]
1285 1286 )
1286 1287
1287 1288
1288 1289 def _confirmbeforesend(repo, revs, oldmap):
1289 1290 url, token = readurltoken(repo.ui)
1290 1291 ui = repo.ui
1291 1292 for rev in revs:
1292 1293 ctx = repo[rev]
1293 1294 desc = ctx.description().splitlines()[0]
1294 1295 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1295 1296 if drevid:
1296 1297 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1297 1298 else:
1298 1299 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1299 1300
1300 1301 ui.write(
1301 1302 _(b'%s - %s: %s\n')
1302 1303 % (
1303 1304 drevdesc,
1304 1305 ui.label(bytes(ctx), b'phabricator.node'),
1305 1306 ui.label(desc, b'phabricator.desc'),
1306 1307 )
1307 1308 )
1308 1309
1309 1310 if ui.promptchoice(
1310 1311 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1311 1312 ):
1312 1313 return False
1313 1314
1314 1315 return True
1315 1316
1316 1317
1317 1318 _knownstatusnames = {
1318 1319 b'accepted',
1319 1320 b'needsreview',
1320 1321 b'needsrevision',
1321 1322 b'closed',
1322 1323 b'abandoned',
1323 1324 b'changesplanned',
1324 1325 }
1325 1326
1326 1327
1327 1328 def _getstatusname(drev):
1328 1329 """get normalized status name from a Differential Revision"""
1329 1330 return drev[b'statusName'].replace(b' ', b'').lower()
1330 1331
1331 1332
1332 1333 # Small language to specify differential revisions. Supported symbols: (), :X,
1333 1334 # +, and -.
1334 1335
1335 1336 _elements = {
1336 1337 # token-type: binding-strength, primary, prefix, infix, suffix
1337 1338 b'(': (12, None, (b'group', 1, b')'), None, None),
1338 1339 b':': (8, None, (b'ancestors', 8), None, None),
1339 1340 b'&': (5, None, None, (b'and_', 5), None),
1340 1341 b'+': (4, None, None, (b'add', 4), None),
1341 1342 b'-': (4, None, None, (b'sub', 4), None),
1342 1343 b')': (0, None, None, None, None),
1343 1344 b'symbol': (0, b'symbol', None, None, None),
1344 1345 b'end': (0, None, None, None, None),
1345 1346 }
1346 1347
1347 1348
1348 1349 def _tokenize(text):
1349 1350 view = memoryview(text) # zero-copy slice
1350 1351 special = b'():+-& '
1351 1352 pos = 0
1352 1353 length = len(text)
1353 1354 while pos < length:
1354 1355 symbol = b''.join(
1355 1356 itertools.takewhile(
1356 1357 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1357 1358 )
1358 1359 )
1359 1360 if symbol:
1360 1361 yield (b'symbol', symbol, pos)
1361 1362 pos += len(symbol)
1362 1363 else: # special char, ignore space
1363 1364 if text[pos : pos + 1] != b' ':
1364 1365 yield (text[pos : pos + 1], None, pos)
1365 1366 pos += 1
1366 1367 yield (b'end', None, pos)
1367 1368
1368 1369
1369 1370 def _parse(text):
1370 1371 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1371 1372 if pos != len(text):
1372 1373 raise error.ParseError(b'invalid token', pos)
1373 1374 return tree
1374 1375
1375 1376
1376 1377 def _parsedrev(symbol):
1377 1378 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1378 1379 if symbol.startswith(b'D') and symbol[1:].isdigit():
1379 1380 return int(symbol[1:])
1380 1381 if symbol.isdigit():
1381 1382 return int(symbol)
1382 1383
1383 1384
1384 1385 def _prefetchdrevs(tree):
1385 1386 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1386 1387 drevs = set()
1387 1388 ancestordrevs = set()
1388 1389 op = tree[0]
1389 1390 if op == b'symbol':
1390 1391 r = _parsedrev(tree[1])
1391 1392 if r:
1392 1393 drevs.add(r)
1393 1394 elif op == b'ancestors':
1394 1395 r, a = _prefetchdrevs(tree[1])
1395 1396 drevs.update(r)
1396 1397 ancestordrevs.update(r)
1397 1398 ancestordrevs.update(a)
1398 1399 else:
1399 1400 for t in tree[1:]:
1400 1401 r, a = _prefetchdrevs(t)
1401 1402 drevs.update(r)
1402 1403 ancestordrevs.update(a)
1403 1404 return drevs, ancestordrevs
1404 1405
1405 1406
1406 1407 def querydrev(ui, spec):
1407 1408 """return a list of "Differential Revision" dicts
1408 1409
1409 1410 spec is a string using a simple query language, see docstring in phabread
1410 1411 for details.
1411 1412
1412 1413 A "Differential Revision dict" looks like:
1413 1414
1414 1415 {
1415 1416 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1416 1417 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1417 1418 "auxiliary": {
1418 1419 "phabricator:depends-on": [
1419 1420 "PHID-DREV-gbapp366kutjebt7agcd"
1420 1421 ]
1421 1422 "phabricator:projects": [],
1422 1423 },
1423 1424 "branch": "default",
1424 1425 "ccs": [],
1425 1426 "commits": [],
1426 1427 "dateCreated": "1499181406",
1427 1428 "dateModified": "1499182103",
1428 1429 "diffs": [
1429 1430 "3",
1430 1431 "4",
1431 1432 ],
1432 1433 "hashes": [],
1433 1434 "id": "2",
1434 1435 "lineCount": "2",
1435 1436 "phid": "PHID-DREV-672qvysjcczopag46qty",
1436 1437 "properties": {},
1437 1438 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1438 1439 "reviewers": [],
1439 1440 "sourcePath": null
1440 1441 "status": "0",
1441 1442 "statusName": "Needs Review",
1442 1443 "summary": "",
1443 1444 "testPlan": "",
1444 1445 "title": "example",
1445 1446 "uri": "https://phab.example.com/D2",
1446 1447 }
1447 1448 """
1448 1449 # TODO: replace differential.query and differential.querydiffs with
1449 1450 # differential.diff.search because the former (and their output) are
1450 1451 # frozen, and planned to be deprecated and removed.
1451 1452
1452 1453 def fetch(params):
1453 1454 """params -> single drev or None"""
1454 1455 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1455 1456 if key in prefetched:
1456 1457 return prefetched[key]
1457 1458 drevs = callconduit(ui, b'differential.query', params)
1458 1459 # Fill prefetched with the result
1459 1460 for drev in drevs:
1460 1461 prefetched[drev[b'phid']] = drev
1461 1462 prefetched[int(drev[b'id'])] = drev
1462 1463 if key not in prefetched:
1463 1464 raise error.Abort(
1464 1465 _(b'cannot get Differential Revision %r') % params
1465 1466 )
1466 1467 return prefetched[key]
1467 1468
1468 1469 def getstack(topdrevids):
1469 1470 """given a top, get a stack from the bottom, [id] -> [id]"""
1470 1471 visited = set()
1471 1472 result = []
1472 1473 queue = [{b'ids': [i]} for i in topdrevids]
1473 1474 while queue:
1474 1475 params = queue.pop()
1475 1476 drev = fetch(params)
1476 1477 if drev[b'id'] in visited:
1477 1478 continue
1478 1479 visited.add(drev[b'id'])
1479 1480 result.append(int(drev[b'id']))
1480 1481 auxiliary = drev.get(b'auxiliary', {})
1481 1482 depends = auxiliary.get(b'phabricator:depends-on', [])
1482 1483 for phid in depends:
1483 1484 queue.append({b'phids': [phid]})
1484 1485 result.reverse()
1485 1486 return smartset.baseset(result)
1486 1487
1487 1488 # Initialize prefetch cache
1488 1489 prefetched = {} # {id or phid: drev}
1489 1490
1490 1491 tree = _parse(spec)
1491 1492 drevs, ancestordrevs = _prefetchdrevs(tree)
1492 1493
1493 1494 # developer config: phabricator.batchsize
1494 1495 batchsize = ui.configint(b'phabricator', b'batchsize')
1495 1496
1496 1497 # Prefetch Differential Revisions in batch
1497 1498 tofetch = set(drevs)
1498 1499 for r in ancestordrevs:
1499 1500 tofetch.update(range(max(1, r - batchsize), r + 1))
1500 1501 if drevs:
1501 1502 fetch({b'ids': list(tofetch)})
1502 1503 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1503 1504
1504 1505 # Walk through the tree, return smartsets
1505 1506 def walk(tree):
1506 1507 op = tree[0]
1507 1508 if op == b'symbol':
1508 1509 drev = _parsedrev(tree[1])
1509 1510 if drev:
1510 1511 return smartset.baseset([drev])
1511 1512 elif tree[1] in _knownstatusnames:
1512 1513 drevs = [
1513 1514 r
1514 1515 for r in validids
1515 1516 if _getstatusname(prefetched[r]) == tree[1]
1516 1517 ]
1517 1518 return smartset.baseset(drevs)
1518 1519 else:
1519 1520 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1520 1521 elif op in {b'and_', b'add', b'sub'}:
1521 1522 assert len(tree) == 3
1522 1523 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1523 1524 elif op == b'group':
1524 1525 return walk(tree[1])
1525 1526 elif op == b'ancestors':
1526 1527 return getstack(walk(tree[1]))
1527 1528 else:
1528 1529 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1529 1530
1530 1531 return [prefetched[r] for r in walk(tree)]
1531 1532
1532 1533
1533 1534 def getdescfromdrev(drev):
1534 1535 """get description (commit message) from "Differential Revision"
1535 1536
1536 1537 This is similar to the differential.getcommitmessage API, but we only care
1537 1538 about a limited set of fields: title, summary, test plan, and URL.
1538 1539 """
1539 1540 title = drev[b'title']
1540 1541 summary = drev[b'summary'].rstrip()
1541 1542 testplan = drev[b'testPlan'].rstrip()
1542 1543 if testplan:
1543 1544 testplan = b'Test Plan:\n%s' % testplan
1544 1545 uri = b'Differential Revision: %s' % drev[b'uri']
1545 1546 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1546 1547
1547 1548
1548 1549 def getdiffmeta(diff):
1549 1550 """get commit metadata (date, node, user, p1) from a diff object
1550 1551
1551 1552 The metadata could be "hg:meta", sent by phabsend, like:
1552 1553
1553 1554 "properties": {
1554 1555 "hg:meta": {
1555 1556 "branch": "default",
1556 1557 "date": "1499571514 25200",
1557 1558 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1558 1559 "user": "Foo Bar <foo@example.com>",
1559 1560 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1560 1561 }
1561 1562 }
1562 1563
1563 1564 Or converted from "local:commits", sent by "arc", like:
1564 1565
1565 1566 "properties": {
1566 1567 "local:commits": {
1567 1568 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1568 1569 "author": "Foo Bar",
1569 1570 "authorEmail": "foo@example.com"
1570 1571 "branch": "default",
1571 1572 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1572 1573 "local": "1000",
1573 1574 "message": "...",
1574 1575 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1575 1576 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1576 1577 "summary": "...",
1577 1578 "tag": "",
1578 1579 "time": 1499546314,
1579 1580 }
1580 1581 }
1581 1582 }
1582 1583
1583 1584 Note: metadata extracted from "local:commits" will lose time zone
1584 1585 information.
1585 1586 """
1586 1587 props = diff.get(b'properties') or {}
1587 1588 meta = props.get(b'hg:meta')
1588 1589 if not meta:
1589 1590 if props.get(b'local:commits'):
1590 1591 commit = sorted(props[b'local:commits'].values())[0]
1591 1592 meta = {}
1592 1593 if b'author' in commit and b'authorEmail' in commit:
1593 1594 meta[b'user'] = b'%s <%s>' % (
1594 1595 commit[b'author'],
1595 1596 commit[b'authorEmail'],
1596 1597 )
1597 1598 if b'time' in commit:
1598 1599 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1599 1600 if b'branch' in commit:
1600 1601 meta[b'branch'] = commit[b'branch']
1601 1602 node = commit.get(b'commit', commit.get(b'rev'))
1602 1603 if node:
1603 1604 meta[b'node'] = node
1604 1605 if len(commit.get(b'parents', ())) >= 1:
1605 1606 meta[b'parent'] = commit[b'parents'][0]
1606 1607 else:
1607 1608 meta = {}
1608 1609 if b'date' not in meta and b'dateCreated' in diff:
1609 1610 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1610 1611 if b'branch' not in meta and diff.get(b'branch'):
1611 1612 meta[b'branch'] = diff[b'branch']
1612 1613 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1613 1614 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1614 1615 return meta
1615 1616
1616 1617
1617 1618 def readpatch(ui, drevs, write):
1618 1619 """generate plain-text patch readable by 'hg import'
1619 1620
1620 1621 write takes a list of (DREV, bytes), where DREV is the differential number
1621 1622 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1622 1623 to be imported. drevs is what "querydrev" returns, results of
1623 1624 "differential.query".
1624 1625 """
1625 1626 # Prefetch hg:meta property for all diffs
1626 1627 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1627 1628 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1628 1629
1629 1630 patches = []
1630 1631
1631 1632 # Generate patch for each drev
1632 1633 for drev in drevs:
1633 1634 ui.note(_(b'reading D%s\n') % drev[b'id'])
1634 1635
1635 1636 diffid = max(int(v) for v in drev[b'diffs'])
1636 1637 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1637 1638 desc = getdescfromdrev(drev)
1638 1639 header = b'# HG changeset patch\n'
1639 1640
1640 1641 # Try to preserve metadata from hg:meta property. Write hg patch
1641 1642 # headers that can be read by the "import" command. See patchheadermap
1642 1643 # and extract in mercurial/patch.py for supported headers.
1643 1644 meta = getdiffmeta(diffs[b'%d' % diffid])
1644 1645 for k in _metanamemap.keys():
1645 1646 if k in meta:
1646 1647 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1647 1648
1648 1649 content = b'%s%s\n%s' % (header, desc, body)
1649 1650 patches.append((drev[b'id'], content))
1650 1651
1651 1652 # Write patches to the supplied callback
1652 1653 write(patches)
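# Illustrative sketch (the header names are an assumption, based on the
# standard hg patch headers that _metanamemap is expected to map to): for a
# drev whose diff carries the hg:meta shown above, the text handed to write()
# starts roughly like:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message from getdescfromdrev()>
#
#   <raw diff returned by differential.getrawdiff>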
1653 1654
1654 1655
1655 1656 @vcrcommand(
1656 1657 b'phabread',
1657 1658 [(b'', b'stack', False, _(b'read dependencies'))],
1658 1659 _(b'DREVSPEC [OPTIONS]'),
1659 1660 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1660 1661 optionalrepo=True,
1661 1662 )
1662 1663 def phabread(ui, repo, spec, **opts):
1663 1664 """print patches from Phabricator suitable for importing
1664 1665
1665 1666 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1666 1667 the number ``123``. It could also have common operators like ``+``, ``-``,
1667 1668 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1668 1669 select a stack.
1669 1670
1670 1671 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1671 1672 could be used to filter patches by status. For performance reasons, they
1672 1673 can only narrow other (non-status) selections and cannot be used alone.
1673 1674
1674 1675 For example, ``:D6+8-(2+D4)`` selects the stack up to D6, plus D8, excluding
1675 1676 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1676 1677 stack up to D9.
1677 1678
1678 1679 If --stack is given, follow dependency information and read all patches.
1679 1680 It is equivalent to the ``:`` operator.
1680 1681 """
1681 1682 opts = pycompat.byteskwargs(opts)
1682 1683 if opts.get(b'stack'):
1683 1684 spec = b':(%s)' % spec
1684 1685 drevs = querydrev(ui, spec)
1685 1686
1686 1687 def _write(patches):
1687 1688 for drev, content in patches:
1688 1689 ui.write(content)
1689 1690
1690 1691 readpatch(ui, drevs, _write)
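# Example invocations (illustrative only):
#
#   hg phabread D123                          # print D123 as an importable patch
#   hg phabread ':D6+8-(2+D4)'                # stack up to D6, plus D8, minus D2 and D4
#   hg phabread --stack D123 | hg import -    # apply D123 and its dependencies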
1691 1692
1692 1693
1693 1694 @vcrcommand(
1694 1695 b'phabupdate',
1695 1696 [
1696 1697 (b'', b'accept', False, _(b'accept revisions')),
1697 1698 (b'', b'reject', False, _(b'reject revisions')),
1698 1699 (b'', b'abandon', False, _(b'abandon revisions')),
1699 1700 (b'', b'reclaim', False, _(b'reclaim revisions')),
1700 1701 (b'm', b'comment', b'', _(b'comment on the last revision')),
1701 1702 ],
1702 1703 _(b'DREVSPEC [OPTIONS]'),
1703 1704 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1704 1705 optionalrepo=True,
1705 1706 )
1706 1707 def phabupdate(ui, repo, spec, **opts):
1707 1708 """update Differential Revision in batch
1708 1709
1709 1710 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1710 1711 """
1711 1712 opts = pycompat.byteskwargs(opts)
1712 1713 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1713 1714 if len(flags) > 1:
1714 1715 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1715 1716
1716 1717 actions = []
1717 1718 for f in flags:
1718 1719 actions.append({b'type': f, b'value': True})
1719 1720
1720 1721 drevs = querydrev(ui, spec)
1721 1722 for i, drev in enumerate(drevs):
1722 1723 if i + 1 == len(drevs) and opts.get(b'comment'):
1723 1724 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1724 1725 if actions:
1725 1726 params = {
1726 1727 b'objectIdentifier': drev[b'phid'],
1727 1728 b'transactions': actions,
1728 1729 }
1729 1730 callconduit(ui, b'differential.revision.edit', params)
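# Example invocations (illustrative only):
#
#   hg phabupdate --accept D123
#   hg phabupdate --abandon ':D9' -m 'superseded by a new series'
#
# Each selected revision triggers one "differential.revision.edit" call; the
# optional comment transaction is attached only to the last revision of the
# spec.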
1730 1731
1731 1732
1732 1733 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1733 1734 def template_review(context, mapping):
1734 1735 """:phabreview: Object describing the review for this changeset.
1735 1736 Has attributes `url` and `id`.
1736 1737 """
1737 1738 ctx = context.resource(mapping, b'ctx')
1738 1739 m = _differentialrevisiondescre.search(ctx.description())
1739 1740 if m:
1740 1741 return templateutil.hybriddict(
1741 1742 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1742 1743 )
1743 1744 else:
1744 1745 tags = ctx.repo().nodetags(ctx.node())
1745 1746 for t in tags:
1746 1747 if _differentialrevisiontagre.match(t):
1747 1748 url = ctx.repo().ui.config(b'phabricator', b'url')
1748 1749 if not url.endswith(b'/'):
1749 1750 url += b'/'
1750 1751 url += t
1751 1752
1752 1753 return templateutil.hybriddict({b'url': url, b'id': t,})
1753 1754 return None
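# Template usage sketch (illustrative; assumes the keyword is rendered with
# dotted member access, which hybriddict-backed keywords support):
#
#   hg log -r . -T '{if(phabreview, "{phabreview.url}\n")}'
#
# prints the Differential Revision URL recorded in the changeset description
# or, failing that, derived from a local "D123"-style tag.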
1754 1755
1755 1756
1756 1757 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1757 1758 def template_status(context, mapping):
1758 1759 """:phabstatus: String. Status of Phabricator differential.
1759 1760 """
1760 1761 ctx = context.resource(mapping, b'ctx')
1761 1762 repo = context.resource(mapping, b'repo')
1762 1763 ui = context.resource(mapping, b'ui')
1763 1764
1764 1765 rev = ctx.rev()
1765 1766 try:
1766 1767 drevid = getdrevmap(repo, [rev])[rev]
1767 1768 except KeyError:
1768 1769 return None
1769 1770 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1770 1771 for drev in drevs:
1771 1772 if int(drev[b'id']) == drevid:
1772 1773 return templateutil.hybriddict(
1773 1774 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1774 1775 )
1775 1776 return None
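# Template usage sketch (illustrative only):
#
#   hg log -r . -T '{phabstatus.url} {phabstatus.status}\n'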
1776 1777
1777 1778
1778 1779 @show.showview(b'phabstatus', csettopic=b'work')
1779 1780 def phabstatusshowview(ui, repo, displayer):
1780 1781 """Phabricator differiential status"""
1781 1782 revs = repo.revs('sort(_underway(), topo)')
1782 1783 drevmap = getdrevmap(repo, revs)
1783 1784 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1784 1785 for rev, drevid in pycompat.iteritems(drevmap):
1785 1786 if drevid is not None:
1786 1787 drevids.add(drevid)
1787 1788 revsbydrevid.setdefault(drevid, set([])).add(rev)
1788 1789 else:
1789 1790 unknownrevs.append(rev)
1790 1791
1791 1792 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1792 1793 drevsbyrev = {}
1793 1794 for drev in drevs:
1794 1795 for rev in revsbydrevid[int(drev[b'id'])]:
1795 1796 drevsbyrev[rev] = drev
1796 1797
1797 1798 def phabstatus(ctx):
1798 1799 drev = drevsbyrev[ctx.rev()]
1799 1800 status = ui.label(
1800 1801 b'%(statusName)s' % drev,
1801 1802 b'phabricator.status.%s' % _getstatusname(drev),
1802 1803 )
1803 1804 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1804 1805
1805 1806 revs -= smartset.baseset(unknownrevs)
1806 1807 revdag = graphmod.dagwalker(repo, revs)
1807 1808
1808 1809 ui.setconfig(b'experimental', b'graphshorten', True)
1809 1810 displayer._exthook = phabstatus
1810 1811 nodelen = show.longestshortest(repo, revs)
1811 1812 logcmdutil.displaygraph(
1812 1813 ui,
1813 1814 repo,
1814 1815 revdag,
1815 1816 displayer,
1816 1817 graphmod.asciiedges,
1817 1818 props={b'nodelen': nodelen},
1818 1819 )
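# Output sketch (illustrative only):
#
#   $ hg show phabstatus
#   ...graph of unfinished changesets...
#   |  https://phab.example.com/D123 Needs Review
#
# Each changeset with a known Differential Revision gets an extra
# "<uri> <status>" line below its graph entry; revisions without one are
# dropped from the graph.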