##// END OF EJS Templates
phabricator: also check parent fctx for binary where it is checked for UTF-8...
Matt Harbison -
r44914:4ce2330f default
parent child Browse files
Show More
@@ -1,1819 +1,1828 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Collects command/config/templatekeyword registrations for this extension
# and applies them via eh.finaluisetup at ui setup time.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect table for the phabricator.* ui labels emitted by this
# extension's output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered through vcrcommand().
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
155 155
156 156
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` (JSON) from the working directory, maps its
    ``repository.callsign`` and ``phabricator.uri`` keys onto the
    corresponding ``[phabricator]`` config items, then delegates to the
    wrapped loader.  Returns True if either source provided config.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig present; nothing to load
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
193 193
194 194
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded and replayed.

    Behaves like ``command()`` but adds the ``--test-vcr`` flag; when that
    flag is given, conduit HTTP requests are recorded to (or replayed from)
    the named cassette file via the ``vcr`` package.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom request matcher: URI and method must match, and the decoded
        # request parameters must be equal.  JSON payloads are compared
        # structurally so that key ordering differences do not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # never record a real conduit API token in the cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # cookies may carry session credentials; strip them from recordings
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr performs dynamic imports that do not work under
                # Mercurial's demandimport, so disable it temporarily
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped command's identity for help/registration
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
271 271
272 272
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, obj):
        # booleans are spelled out the way PHP forms expect
        if isinstance(obj, bool):
            obj = b'true' if obj else b'false'
        # exact type checks: subclasses of list/dict are treated as leaf
        # values, matching the original dispatch-by-type behaviour
        if type(obj) is list:
            children = [(b'%d' % idx, item) for idx, item in enumerate(obj)]
        elif type(obj) is dict:
            children = list(obj.items())
        else:
            flatparams[prefix] = obj
            return
        for childkey, childval in children:
            if prefix:
                flatten(b'%s[%s]' % (prefix, childkey), childval)
            else:
                flatten(childkey, childval)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
298 298
299 299
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authres = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authres:
        groupname, auth = authres
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
328 328
329 329
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method name (e.g. ``differential.creatediff``).
    The request is sent either through the builtin HTTP opener or, when
    ``phabricator.curlcmd`` is configured, by piping it through an external
    curl process.  Raises ``error.Abort`` if the server reports an error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # the conduit API token travels inside the request parameters
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # feed the form body to curl on stdin (-d @-)
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # convert unicode strings in the decoded JSON back to local bytes
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
373 373
374 374
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # stable, human-readable output for tests and debugging
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # remember the answer so later calls skip the conduit round trip
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
419 419
420 420
# Matches a local tag of the form "D123" (a Differential Revision number)
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>D123" line in a commit message
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
425 425
426 426
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag-derived association still needs to be
                        # cross-checked against Phabricator below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message explicitly names the revision
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # tagging to nullid removes the stale local tag
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
520 520
521 521
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # prefer the "Differential Revision:" line in the description
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            mapping[rev] = int(descmatch.group('id'))
            continue
        # otherwise fall back to a local "D123" tag on the node
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
543 543
544 544
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    diffgen = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # labels are only for terminal colouring; drop them here
    for piece, _label in diffgen:
        buf.write(piece)
    return buf.getvalue()
553 553
554 554
class DiffChangeType(object):
    """Constants used as ``phabchange.type`` when building a Differential
    diff (add/change/delete plus the move/copy variants).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
564 564
565 565
class DiffFileType(object):
    """Constants used as ``phabchange.fileType`` when building a
    Differential diff.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
570 570
571 571
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are camelCase because they are serialized verbatim into
    the conduit request (see ``phabchange.addhunk``).
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
585 585
586 586
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # duplicate every "new:*" metadata entry under the "old:*" key
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # record the parent file's unix mode (git-style mode string)
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # record the new file's unix mode (git-style mode string)
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
626 626
627 627
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        # keyed by path; a later change for the same path replaces the earlier
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
654 654
655 655
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # very large context so hunks carry plenty of surrounding lines
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the "@@ ..." hunk header; the corpus is the body only
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # compute add/del counts for this hunk from a synthetic mini-diff
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
684 684
685 685
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID previously returned by ``file.allocate``.
    """
    ui = fctx.repo().ui
    # ask the server which byte ranges it still needs
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # server already has this chunk; skip it
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
711 711
712 712
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the file object on the server (which may already
    exist).  Raises ``error.Abort`` if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID allocated: send the whole file in one file.upload call
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated: stream the content in chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
748 748
749 749
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        # upload the old content and link it via its PHID
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
773 773
774 774
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        mimetype = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = mimetype
        # images get their own file type so the web UI can preview them
        if mimetype.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
787 787
788 788
# Copied from mercurial/patch.py
# Maps a filectx flag (b'l' symlink, b'x' executable, b'' regular) to the
# git-style file mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
791 791
792 792
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # tell the user why this file will be sent as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
806 806
807 807
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each removed file becomes a DELETE phabchange; text content is attached
    unless the parent version is binary or non-UTF-8.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # look up the parent filectx once and reuse it (the original code
        # performed the same ctx.p1()[fname] lookup twice)
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
820 820
821 821
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    A file is sent as binary when either the new version or its parent is
    binary or not valid UTF-8, since Phabricator needs both sides of the
    change marked consistently.
    """
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # check the parent fctx for binary as well as UTF-8, so that a
        # text -> binary (or vice versa) modification is handled as binary
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
842 847
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # the source disappeared: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # moved already, and copied again: mark the source MULTICOPY
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # also check the parent fctx (when this is a copy/move) for binary,
        # not just UTF-8, so both sides are marked consistently
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # emit the source-side changes for copies and moves last
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
908 917
909 918
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff from the changes between ``ctx`` and its first parent
    and submits it via the ``differential.creatediff`` conduit call.
    Returns the server's diff object; raises ``error.Abort`` on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
935 944
936 945
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches two properties via "differential.setdiffproperty":

    - b'hg:meta': user/date/branch/node/parent of ctx, consumed by
      getdiffmeta() when the patch is read back.
    - b'local:commits': the same changeset data keyed by hash, mirroring
      the format "arc" uploads (see getdiffmeta()).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
973 982
974 983
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair; aborts if the server returns nothing.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context git diffs to decide whether the patch content
        # actually changed relative to the previously-sent node.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1044 1053
1045 1054
def userphids(ui, names):
    """Convert a list of user names to PHIDs via the "user.search" API.

    Aborts when any requested name is unknown to the server, since the
    API itself silently drops unknown usernames instead of erroring.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # username not found is not an error of the API, so detect any names
    # the server did not resolve ourselves.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1061 1070
1062 1071
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker transactions are shared by every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per revision: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1274 1283
1275 1284
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE: the trailing space in b'Parent ' is present in the original
        # and preserved as-is.
        (b'parent', b'Parent '),
    ]
)
1287 1296
1288 1297
def _confirmbeforesend(repo, revs, oldmap):
    """Show the changesets about to be sent and prompt for confirmation.

    Returns True if the user accepted, False if they declined.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Label known Differential Revisions by number, unknown ones as NEW.
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns the 0-based index of the chosen option, so a
    # truthy result means "No" was selected.
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1316 1325
1317 1326
# Normalized (lowercase, space-free) status names that may appear as bare
# symbols in a drev query; compared against _getstatusname() in querydrev.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1326 1335
1327 1336
1328 1337 def _getstatusname(drev):
1329 1338 """get normalized status name from a Differential Revision"""
1330 1339 return drev[b'statusName'].replace(b' ', b'').lower()
1331 1340
1332 1341
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# The table below is consumed by parser.parser() in _parse(); tokens are
# produced by _tokenize().

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1347 1356
1348 1357
def _tokenize(text):
    """Yield (token-type, value, position) triples for the drev query language.

    Token types match the keys of ``_elements``: b'symbol' for runs of
    non-special bytes, each special character b'():+-&' itself, and a
    trailing b'end' marker.  Spaces are skipped.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume non-special bytes as a single symbol token.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1368 1377
1369 1378
def _parse(text):
    """Parse a drev query string into a parse tree.

    Raises ParseError when the whole input cannot be consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1375 1384
1376 1385
1377 1386 def _parsedrev(symbol):
1378 1387 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1379 1388 if symbol.startswith(b'D') and symbol[1:].isdigit():
1380 1389 return int(symbol[1:])
1381 1390 if symbol.isdigit():
1382 1391 return int(symbol)
1383 1392
1384 1393
1385 1394 def _prefetchdrevs(tree):
1386 1395 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1387 1396 drevs = set()
1388 1397 ancestordrevs = set()
1389 1398 op = tree[0]
1390 1399 if op == b'symbol':
1391 1400 r = _parsedrev(tree[1])
1392 1401 if r:
1393 1402 drevs.add(r)
1394 1403 elif op == b'ancestors':
1395 1404 r, a = _prefetchdrevs(tree[1])
1396 1405 drevs.update(r)
1397 1406 ancestordrevs.update(r)
1398 1407 ancestordrevs.update(a)
1399 1408 else:
1400 1409 for t in tree[1:]:
1401 1410 r, a = _prefetchdrevs(t)
1402 1411 drevs.update(r)
1403 1412 ancestordrevs.update(a)
1404 1413 return drevs, ancestordrevs
1405 1414
1406 1415
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, indexed by both phid and id.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk over "phabricator:depends-on" edges, reversed at
        # the end so the result is ordered bottom-to-top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Speculatively fetch a window of ids below each ancestor anchor.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter within the already-resolved validids.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1532 1541
1533 1542
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.  Empty
    sections are omitted from the output.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        sections.append(b'Test Plan:\n%s' % plan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(s for s in sections if s)
1547 1556
1548 1557
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata is taken from the "hg:meta" diff property when present
    (sent by phabsend), e.g.::

        "properties": {
            "hg:meta": {
                "branch": "default",
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Otherwise it is reconstructed from "local:commits" (sent by "arc")::

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "authorEmail": "foo@example.com"
                    "branch": "default",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "time": 1499546314,
                }
            }
        }

    Fields still missing afterwards are backfilled from the diff object's
    own attributes (dateCreated, branch, sourceControlBaseRevision).

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # Pick the first commit entry after sorting.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if commit.get(b'parents'):
                meta[b'parent'] = commit[b'parents'][0]
    # Backfill missing fields from the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    for metakey, diffkey in [
        (b'branch', b'branch'),
        (b'parent', b'sourceControlBaseRevision'),
    ]:
        if metakey not in meta and diff.get(diffkey):
            meta[metakey] = diff[diffkey]
    return meta
1616 1625
1617 1626
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".

    Only the latest diff (max id) of each drev is used.
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1654 1663
1655 1664
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ancestors (":") operator.
        spec = b':(%s)' % spec
    drevs = querydrev(ui, spec)

    def _write(patches):
        # Callback for readpatch(): emit every patch body via the ui.
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1692 1701
1693 1702
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Only one status-changing flag may be given per invocation.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = querydrev(ui, spec)
    for i, drev in enumerate(drevs):
        # The comment, if any, is only attached to the last revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1731 1740
1732 1741
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the URL embedded in the commit message; fall back to a local
    # D* tag combined with the configured phabricator.url.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
1755 1764
1756 1765
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        # Bail out when no Differential Revision maps to this rev.
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1777 1786
1778 1787
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition revs into those with a known Differential Revision id and
    # those without; group revs by drev id for the reverse lookup below.
    unknownrevs, drevids, revsbydrevid = [], set([]), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set([])).add(rev)
        else:
            unknownrevs.append(rev)

    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked per changeset by the graph displayer below.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revs without an associated drev are dropped from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now