phabricator: add the DiffChangeType and DiffFileType constants...
Ian Moody
r43452:a66e2844 default
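The diff below introduces two enum-like classes, DiffChangeType and DiffFileType, mirroring the integer constants Phabricator uses in its differential diff metadata. As a rough illustration of how such constants might later be consulted by a caller, here is a minimal sketch; the helper function and its arguments are hypothetical and are not part of this changeset::

    # Hypothetical helper (not part of this changeset): pick the Phabricator
    # change-type constant for a file, given whether it exists in the parent
    # changeset and in the current changeset.
    def guesschangetype(inparent, incurrent):
        if not inparent:
            return DiffChangeType.ADD      # file introduced by this commit
        if not incurrent:
            return DiffChangeType.DELETE   # file removed by this commit
        return DiffChangeType.CHANGE       # file modified in place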
@@ -1,1259 +1,1276 @@
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config on the server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use the built-in HTTP library to
29 29 # communicate. If set, use the specified curl command. This can be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import contextlib
45 45 import itertools
46 46 import json
47 47 import operator
48 48 import re
49 49
50 50 from mercurial.node import bin, nullid
51 51 from mercurial.i18n import _
52 52 from mercurial.pycompat import getattr
53 53 from mercurial import (
54 54 cmdutil,
55 55 context,
56 56 encoding,
57 57 error,
58 58 exthelper,
59 59 httpconnection as httpconnectionmod,
60 60 mdiff,
61 61 obsutil,
62 62 parser,
63 63 patch,
64 64 phases,
65 65 pycompat,
66 66 scmutil,
67 67 smartset,
68 68 tags,
69 69 templatefilters,
70 70 templateutil,
71 71 url as urlmod,
72 72 util,
73 73 )
74 74 from mercurial.utils import (
75 75 procutil,
76 76 stringutil,
77 77 )
78 78
79 79 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
80 80 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 81 # be specifying the version(s) of Mercurial they are tested with, or
82 82 # leave the attribute unspecified.
83 83 testedwith = b'ships-with-hg-core'
84 84
85 85 eh = exthelper.exthelper()
86 86
87 87 cmdtable = eh.cmdtable
88 88 command = eh.command
89 89 configtable = eh.configtable
90 90 templatekeyword = eh.templatekeyword
91 91
92 92 # developer config: phabricator.batchsize
93 93 eh.configitem(
94 94 b'phabricator', b'batchsize', default=12,
95 95 )
96 96 eh.configitem(
97 97 b'phabricator', b'callsign', default=None,
98 98 )
99 99 eh.configitem(
100 100 b'phabricator', b'curlcmd', default=None,
101 101 )
102 102 # developer config: phabricator.repophid
103 103 eh.configitem(
104 104 b'phabricator', b'repophid', default=None,
105 105 )
106 106 eh.configitem(
107 107 b'phabricator', b'url', default=None,
108 108 )
109 109 eh.configitem(
110 110 b'phabsend', b'confirm', default=False,
111 111 )
112 112
113 113 colortable = {
114 114 b'phabricator.action.created': b'green',
115 115 b'phabricator.action.skipped': b'magenta',
116 116 b'phabricator.action.updated': b'magenta',
117 117 b'phabricator.desc': b'',
118 118 b'phabricator.drev': b'bold',
119 119 b'phabricator.node': b'',
120 120 }
121 121
122 122 _VCR_FLAGS = [
123 123 (
124 124 b'',
125 125 b'test-vcr',
126 126 b'',
127 127 _(
128 128 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
129 129 b', otherwise will mock all http requests using the specified vcr file.'
130 130 b' (ADVANCED)'
131 131 ),
132 132 ),
133 133 ]
134 134
135 135
136 136 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
137 137 fullflags = flags + _VCR_FLAGS
138 138
139 139 def hgmatcher(r1, r2):
140 140 if r1.uri != r2.uri or r1.method != r2.method:
141 141 return False
142 142 r1params = r1.body.split(b'&')
143 143 r2params = r2.body.split(b'&')
144 144 return set(r1params) == set(r2params)
145 145
146 146 def sanitiserequest(request):
147 147 request.body = re.sub(
148 148 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
149 149 )
150 150 return request
151 151
152 152 def sanitiseresponse(response):
153 153 if r'set-cookie' in response[r'headers']:
154 154 del response[r'headers'][r'set-cookie']
155 155 return response
156 156
157 157 def decorate(fn):
158 158 def inner(*args, **kwargs):
159 159 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
160 160 if cassette:
161 161 import hgdemandimport
162 162
163 163 with hgdemandimport.deactivated():
164 164 import vcr as vcrmod
165 165 import vcr.stubs as stubs
166 166
167 167 vcr = vcrmod.VCR(
168 168 serializer=r'json',
169 169 before_record_request=sanitiserequest,
170 170 before_record_response=sanitiseresponse,
171 171 custom_patches=[
172 172 (
173 173 urlmod,
174 174 r'httpconnection',
175 175 stubs.VCRHTTPConnection,
176 176 ),
177 177 (
178 178 urlmod,
179 179 r'httpsconnection',
180 180 stubs.VCRHTTPSConnection,
181 181 ),
182 182 ],
183 183 )
184 184 vcr.register_matcher(r'hgmatcher', hgmatcher)
185 185 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
186 186 return fn(*args, **kwargs)
187 187 return fn(*args, **kwargs)
188 188
189 189 inner.__name__ = fn.__name__
190 190 inner.__doc__ = fn.__doc__
191 191 return command(
192 192 name,
193 193 fullflags,
194 194 spec,
195 195 helpcategory=helpcategory,
196 196 optionalrepo=optionalrepo,
197 197 )(inner)
198 198
199 199 return decorate
200 200
201 201
202 202 def urlencodenested(params):
203 203 """like urlencode, but works with nested parameters.
204 204
205 205 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
206 206 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
207 207 urlencode. Note: the encoding is consistent with PHP's http_build_query.
208 208 """
209 209 flatparams = util.sortdict()
210 210
211 211 def process(prefix, obj):
212 212 if isinstance(obj, bool):
213 213 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
214 214 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
215 215 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
216 216 if items is None:
217 217 flatparams[prefix] = obj
218 218 else:
219 219 for k, v in items(obj):
220 220 if prefix:
221 221 process(b'%s[%s]' % (prefix, k), v)
222 222 else:
223 223 process(k, v)
224 224
225 225 process(b'', params)
226 226 return util.urlreq.urlencode(flatparams)
227 227
228 228
229 229 def readurltoken(ui):
230 230 """return conduit url, token and make sure they exist
231 231
232 232 Currently read from [auth] config section. In the future, it might
233 233 make sense to read from .arcconfig and .arcrc as well.
234 234 """
235 235 url = ui.config(b'phabricator', b'url')
236 236 if not url:
237 237 raise error.Abort(
238 238 _(b'config %s.%s is required') % (b'phabricator', b'url')
239 239 )
240 240
241 241 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
242 242 token = None
243 243
244 244 if res:
245 245 group, auth = res
246 246
247 247 ui.debug(b"using auth.%s.* for authentication\n" % group)
248 248
249 249 token = auth.get(b'phabtoken')
250 250
251 251 if not token:
252 252 raise error.Abort(
253 253 _(b'Can\'t find conduit token associated to %s') % (url,)
254 254 )
255 255
256 256 return url, token
257 257
258 258
259 259 def callconduit(ui, name, params):
260 260 """call Conduit API, params is a dict. return json.loads result, or None"""
261 261 host, token = readurltoken(ui)
262 262 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
263 263 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
264 264 params = params.copy()
265 265 params[b'api.token'] = token
266 266 data = urlencodenested(params)
267 267 curlcmd = ui.config(b'phabricator', b'curlcmd')
268 268 if curlcmd:
269 269 sin, sout = procutil.popen2(
270 270 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
271 271 )
272 272 sin.write(data)
273 273 sin.close()
274 274 body = sout.read()
275 275 else:
276 276 urlopener = urlmod.opener(ui, authinfo)
277 277 request = util.urlreq.request(pycompat.strurl(url), data=data)
278 278 with contextlib.closing(urlopener.open(request)) as rsp:
279 279 body = rsp.read()
280 280 ui.debug(b'Conduit Response: %s\n' % body)
281 281 parsed = pycompat.rapply(
282 282 lambda x: encoding.unitolocal(x)
283 283 if isinstance(x, pycompat.unicode)
284 284 else x,
285 285 # json.loads only accepts bytes from py3.6+
286 286 json.loads(encoding.unifromlocal(body)),
287 287 )
288 288 if parsed.get(b'error_code'):
289 289 msg = _(b'Conduit Error (%s): %s') % (
290 290 parsed[b'error_code'],
291 291 parsed[b'error_info'],
292 292 )
293 293 raise error.Abort(msg)
294 294 return parsed[b'result']
295 295
296 296
297 297 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
298 298 def debugcallconduit(ui, repo, name):
299 299 """call Conduit API
300 300
301 301 Call parameters are read from stdin as a JSON blob. Result will be written
302 302 to stdout as a JSON blob.
303 303 """
304 304 # json.loads only accepts bytes from 3.6+
305 305 rawparams = encoding.unifromlocal(ui.fin.read())
306 306 # json.loads only returns unicode strings
307 307 params = pycompat.rapply(
308 308 lambda x: encoding.unitolocal(x)
309 309 if isinstance(x, pycompat.unicode)
310 310 else x,
311 311 json.loads(rawparams),
312 312 )
313 313 # json.dumps only accepts unicode strings
314 314 result = pycompat.rapply(
315 315 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
316 316 callconduit(ui, name, params),
317 317 )
318 318 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
319 319 ui.write(b'%s\n' % encoding.unitolocal(s))
320 320
321 321
322 322 def getrepophid(repo):
323 323 """given callsign, return repository PHID or None"""
324 324 # developer config: phabricator.repophid
325 325 repophid = repo.ui.config(b'phabricator', b'repophid')
326 326 if repophid:
327 327 return repophid
328 328 callsign = repo.ui.config(b'phabricator', b'callsign')
329 329 if not callsign:
330 330 return None
331 331 query = callconduit(
332 332 repo.ui,
333 333 b'diffusion.repository.search',
334 334 {b'constraints': {b'callsigns': [callsign]}},
335 335 )
336 336 if len(query[b'data']) == 0:
337 337 return None
338 338 repophid = query[b'data'][0][b'phid']
339 339 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
340 340 return repophid
341 341
342 342
343 343 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 344 _differentialrevisiondescre = re.compile(
345 345 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 346 )
347 347
348 348
349 349 def getoldnodedrevmap(repo, nodelist):
350 350 """find previous nodes that have been sent to Phabricator
351 351
352 352 return {node: (oldnode, Differential diff, Differential Revision ID)}
353 353 for node in nodelist with known previous sent versions, or associated
354 354 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
355 355 be ``None``.
356 356
357 357 Examines commit messages like "Differential Revision:" to get the
358 358 association information.
359 359
360 360 If no such commit message line is found, examine all precursors and their
361 361 tags. Tags with a format like "D1234" are considered a match, and the node
362 362 with that tag, together with the number after "D" (ex. 1234), will be returned.
363 363
364 364 The ``old node``, if not None, is guaranteed to be the last diff of the
365 365 corresponding Differential Revision, and to exist in the repo.
366 366 """
367 367 unfi = repo.unfiltered()
368 368 nodemap = unfi.changelog.nodemap
369 369
370 370 result = {} # {node: (oldnode?, lastdiff?, drev)}
371 371 toconfirm = {} # {node: (force, {precnode}, drev)}
372 372 for node in nodelist:
373 373 ctx = unfi[node]
374 374 # For tags like "D123", put them into "toconfirm" to verify later
375 375 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
376 376 for n in precnodes:
377 377 if n in nodemap:
378 378 for tag in unfi.nodetags(n):
379 379 m = _differentialrevisiontagre.match(tag)
380 380 if m:
381 381 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
382 382 continue
383 383
384 384 # Check commit message
385 385 m = _differentialrevisiondescre.search(ctx.description())
386 386 if m:
387 387 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
388 388
389 389 # Double-check that tags are genuine by collecting all old nodes from
390 390 # Phabricator, and expecting the precursors to overlap with them.
391 391 if toconfirm:
392 392 drevs = [drev for force, precs, drev in toconfirm.values()]
393 393 alldiffs = callconduit(
394 394 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
395 395 )
396 396 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
397 397 for newnode, (force, precset, drev) in toconfirm.items():
398 398 diffs = [
399 399 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
400 400 ]
401 401
402 402 # "precursors" as known by Phabricator
403 403 phprecset = set(getnode(d) for d in diffs)
404 404
405 405 # Ignore if precursors (Phabricator and local repo) do not overlap,
406 406 # and force is not set (when commit message says nothing)
407 407 if not force and not bool(phprecset & precset):
408 408 tagname = b'D%d' % drev
409 409 tags.tag(
410 410 repo,
411 411 tagname,
412 412 nullid,
413 413 message=None,
414 414 user=None,
415 415 date=None,
416 416 local=True,
417 417 )
418 418 unfi.ui.warn(
419 419 _(
420 420 b'D%s: local tag removed - does not match '
421 421 b'Differential history\n'
422 422 )
423 423 % drev
424 424 )
425 425 continue
426 426
427 427 # Find the last node using Phabricator metadata, and make sure it
428 428 # exists in the repo
429 429 oldnode = lastdiff = None
430 430 if diffs:
431 431 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
432 432 oldnode = getnode(lastdiff)
433 433 if oldnode and oldnode not in nodemap:
434 434 oldnode = None
435 435
436 436 result[newnode] = (oldnode, lastdiff, drev)
437 437
438 438 return result
439 439
440 440
441 441 def getdiff(ctx, diffopts):
442 442 """plain-text diff without header (user, commit message, etc)"""
443 443 output = util.stringio()
444 444 for chunk, _label in patch.diffui(
445 445 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
446 446 ):
447 447 output.write(chunk)
448 448 return output.getvalue()
449 449
450 450
451 class DiffChangeType(object):
452 ADD = 1
453 CHANGE = 2
454 DELETE = 3
455 MOVE_AWAY = 4
456 COPY_AWAY = 5
457 MOVE_HERE = 6
458 COPY_HERE = 7
459 MULTICOPY = 8
460
461
462 class DiffFileType(object):
463 TEXT = 1
464 IMAGE = 2
465 BINARY = 3
466
467
451 468 def creatediff(ctx):
452 469 """create a Differential Diff"""
453 470 repo = ctx.repo()
454 471 repophid = getrepophid(repo)
455 472 # Create a "Differential Diff" via "differential.createrawdiff" API
456 473 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
457 474 if repophid:
458 475 params[b'repositoryPHID'] = repophid
459 476 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
460 477 if not diff:
461 478 raise error.Abort(_(b'cannot create diff for %s') % ctx)
462 479 return diff
463 480
464 481
465 482 def writediffproperties(ctx, diff):
466 483 """write metadata to diff so patches could be applied losslessly"""
467 484 params = {
468 485 b'diff_id': diff[b'id'],
469 486 b'name': b'hg:meta',
470 487 b'data': templatefilters.json(
471 488 {
472 489 b'user': ctx.user(),
473 490 b'date': b'%d %d' % ctx.date(),
474 491 b'branch': ctx.branch(),
475 492 b'node': ctx.hex(),
476 493 b'parent': ctx.p1().hex(),
477 494 }
478 495 ),
479 496 }
480 497 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
481 498
482 499 params = {
483 500 b'diff_id': diff[b'id'],
484 501 b'name': b'local:commits',
485 502 b'data': templatefilters.json(
486 503 {
487 504 ctx.hex(): {
488 505 b'author': stringutil.person(ctx.user()),
489 506 b'authorEmail': stringutil.email(ctx.user()),
490 507 b'time': int(ctx.date()[0]),
491 508 b'commit': ctx.hex(),
492 509 b'parents': [ctx.p1().hex()],
493 510 b'branch': ctx.branch(),
494 511 },
495 512 }
496 513 ),
497 514 }
498 515 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
499 516
500 517
501 518 def createdifferentialrevision(
502 519 ctx,
503 520 revid=None,
504 521 parentrevphid=None,
505 522 oldnode=None,
506 523 olddiff=None,
507 524 actions=None,
508 525 comment=None,
509 526 ):
510 527 """create or update a Differential Revision
511 528
512 529 If revid is None, create a new Differential Revision, otherwise update
513 530 revid. If parentrevphid is not None, set it as a dependency.
514 531
515 532 If oldnode is not None, check if the patch content (without commit message
516 533 and metadata) has changed before creating another diff.
517 534
518 535 If actions is not None, they will be appended to the transaction.
519 536 """
520 537 repo = ctx.repo()
521 538 if oldnode:
522 539 diffopts = mdiff.diffopts(git=True, context=32767)
523 540 oldctx = repo.unfiltered()[oldnode]
524 541 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
525 542 else:
526 543 neednewdiff = True
527 544
528 545 transactions = []
529 546 if neednewdiff:
530 547 diff = creatediff(ctx)
531 548 transactions.append({b'type': b'update', b'value': diff[b'phid']})
532 549 if comment:
533 550 transactions.append({b'type': b'comment', b'value': comment})
534 551 else:
535 552 # Even if we don't need to upload a new diff because the patch content
536 553 # has not changed, we might still need to update its metadata so
537 554 # pushers know the correct node metadata.
538 555 assert olddiff
539 556 diff = olddiff
540 557 writediffproperties(ctx, diff)
541 558
542 559 # Set the parent Revision every time, so commit re-ordering is picked-up
543 560 if parentrevphid:
544 561 transactions.append(
545 562 {b'type': b'parents.set', b'value': [parentrevphid]}
546 563 )
547 564
548 565 if actions:
549 566 transactions += actions
550 567
551 568 # Parse commit message and update related fields.
552 569 desc = ctx.description()
553 570 info = callconduit(
554 571 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
555 572 )
556 573 for k, v in info[b'fields'].items():
557 574 if k in [b'title', b'summary', b'testPlan']:
558 575 transactions.append({b'type': k, b'value': v})
559 576
560 577 params = {b'transactions': transactions}
561 578 if revid is not None:
562 579 # Update an existing Differential Revision
563 580 params[b'objectIdentifier'] = revid
564 581
565 582 revision = callconduit(repo.ui, b'differential.revision.edit', params)
566 583 if not revision:
567 584 raise error.Abort(_(b'cannot create revision for %s') % ctx)
568 585
569 586 return revision, diff
570 587
571 588
572 589 def userphids(repo, names):
573 590 """convert user names to PHIDs"""
574 591 names = [name.lower() for name in names]
575 592 query = {b'constraints': {b'usernames': names}}
576 593 result = callconduit(repo.ui, b'user.search', query)
577 594 # A username not being found is not an API error, so check whether we
578 595 # missed any names here.
579 596 data = result[b'data']
580 597 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
581 598 unresolved = set(names) - resolved
582 599 if unresolved:
583 600 raise error.Abort(
584 601 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
585 602 )
586 603 return [entry[b'phid'] for entry in data]
587 604
588 605
589 606 @vcrcommand(
590 607 b'phabsend',
591 608 [
592 609 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
593 610 (b'', b'amend', True, _(b'update commit messages')),
594 611 (b'', b'reviewer', [], _(b'specify reviewers')),
595 612 (b'', b'blocker', [], _(b'specify blocking reviewers')),
596 613 (
597 614 b'm',
598 615 b'comment',
599 616 b'',
600 617 _(b'add a comment to Revisions with new/updated Diffs'),
601 618 ),
602 619 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
603 620 ],
604 621 _(b'REV [OPTIONS]'),
605 622 helpcategory=command.CATEGORY_IMPORT_EXPORT,
606 623 )
607 624 def phabsend(ui, repo, *revs, **opts):
608 625 """upload changesets to Phabricator
609 626
610 627 If multiple revisions are specified, they will be sent as a stack
611 628 with a linear dependency relationship, using the order specified by the
612 629 revset.
613 630
614 631 When uploading changesets for the first time, local tags will be created to
615 632 maintain the association. After that, phabsend will check the
616 633 obsstore and tag information so it can figure out whether to update an
617 634 existing Differential Revision, or create a new one.
618 635
619 636 If --amend is set, update commit messages so they contain the
620 637 ``Differential Revision`` URL, and remove the related local tags. This is
621 638 similar to what arcanist does, and is preferred in author-push workflows.
622 639 Otherwise, local tags are used to record the ``Differential Revision`` association.
623 640
624 641 The --confirm option lets you confirm changesets before sending them. You
625 642 can also add the following to your configuration file to make it the default
626 643 behaviour::
627 644
628 645 [phabsend]
629 646 confirm = true
630 647
631 648 phabsend will check obsstore and the above association to decide whether to
632 649 update an existing Differential Revision, or create a new one.
633 650 """
634 651 opts = pycompat.byteskwargs(opts)
635 652 revs = list(revs) + opts.get(b'rev', [])
636 653 revs = scmutil.revrange(repo, revs)
637 654
638 655 if not revs:
639 656 raise error.Abort(_(b'phabsend requires at least one changeset'))
640 657 if opts.get(b'amend'):
641 658 cmdutil.checkunfinished(repo)
642 659
643 660 # {newnode: (oldnode, olddiff, olddrev)}
644 661 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
645 662
646 663 confirm = ui.configbool(b'phabsend', b'confirm')
647 664 confirm |= bool(opts.get(b'confirm'))
648 665 if confirm:
649 666 confirmed = _confirmbeforesend(repo, revs, oldmap)
650 667 if not confirmed:
651 668 raise error.Abort(_(b'phabsend cancelled'))
652 669
653 670 actions = []
654 671 reviewers = opts.get(b'reviewer', [])
655 672 blockers = opts.get(b'blocker', [])
656 673 phids = []
657 674 if reviewers:
658 675 phids.extend(userphids(repo, reviewers))
659 676 if blockers:
660 677 phids.extend(
661 678 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
662 679 )
663 680 if phids:
664 681 actions.append({b'type': b'reviewers.add', b'value': phids})
665 682
666 683 drevids = [] # [int]
667 684 diffmap = {} # {newnode: diff}
668 685
669 686 # Send patches one by one so we know their Differential Revision PHIDs and
670 687 # can provide dependency relationship
671 688 lastrevphid = None
672 689 for rev in revs:
673 690 ui.debug(b'sending rev %d\n' % rev)
674 691 ctx = repo[rev]
675 692
676 693 # Get Differential Revision ID
677 694 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
678 695 if oldnode != ctx.node() or opts.get(b'amend'):
679 696 # Create or update Differential Revision
680 697 revision, diff = createdifferentialrevision(
681 698 ctx,
682 699 revid,
683 700 lastrevphid,
684 701 oldnode,
685 702 olddiff,
686 703 actions,
687 704 opts.get(b'comment'),
688 705 )
689 706 diffmap[ctx.node()] = diff
690 707 newrevid = int(revision[b'object'][b'id'])
691 708 newrevphid = revision[b'object'][b'phid']
692 709 if revid:
693 710 action = b'updated'
694 711 else:
695 712 action = b'created'
696 713
697 714 # Create a local tag to note the association, if commit message
698 715 # does not have it already
699 716 m = _differentialrevisiondescre.search(ctx.description())
700 717 if not m or int(m.group(r'id')) != newrevid:
701 718 tagname = b'D%d' % newrevid
702 719 tags.tag(
703 720 repo,
704 721 tagname,
705 722 ctx.node(),
706 723 message=None,
707 724 user=None,
708 725 date=None,
709 726 local=True,
710 727 )
711 728 else:
712 729 # Nothing changed. But still set "newrevphid" so the next revision
713 730 # could depend on this one and "newrevid" for the summary line.
714 731 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
715 732 newrevid = revid
716 733 action = b'skipped'
717 734
718 735 actiondesc = ui.label(
719 736 {
720 737 b'created': _(b'created'),
721 738 b'skipped': _(b'skipped'),
722 739 b'updated': _(b'updated'),
723 740 }[action],
724 741 b'phabricator.action.%s' % action,
725 742 )
726 743 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
727 744 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
728 745 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
729 746 ui.write(
730 747 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
731 748 )
732 749 drevids.append(newrevid)
733 750 lastrevphid = newrevphid
734 751
735 752 # Update commit messages and remove tags
736 753 if opts.get(b'amend'):
737 754 unfi = repo.unfiltered()
738 755 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
739 756 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
740 757 wnode = unfi[b'.'].node()
741 758 mapping = {} # {oldnode: [newnode]}
742 759 for i, rev in enumerate(revs):
743 760 old = unfi[rev]
744 761 drevid = drevids[i]
745 762 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
746 763 newdesc = getdescfromdrev(drev)
747 764 # Make sure the commit message contains "Differential Revision"
748 765 if old.description() != newdesc:
749 766 if old.phase() == phases.public:
750 767 ui.warn(
751 768 _(b"warning: not updating public commit %s\n")
752 769 % scmutil.formatchangeid(old)
753 770 )
754 771 continue
755 772 parents = [
756 773 mapping.get(old.p1().node(), (old.p1(),))[0],
757 774 mapping.get(old.p2().node(), (old.p2(),))[0],
758 775 ]
759 776 new = context.metadataonlyctx(
760 777 repo,
761 778 old,
762 779 parents=parents,
763 780 text=newdesc,
764 781 user=old.user(),
765 782 date=old.date(),
766 783 extra=old.extra(),
767 784 )
768 785
769 786 newnode = new.commit()
770 787
771 788 mapping[old.node()] = [newnode]
772 789 # Update diff property
773 790 # If it fails just warn and keep going, otherwise the DREV
774 791 # associations will be lost
775 792 try:
776 793 writediffproperties(unfi[newnode], diffmap[old.node()])
777 794 except util.urlerr.urlerror:
778 795 ui.warnnoi18n(
779 796 b'Failed to update metadata for D%s\n' % drevid
780 797 )
781 798 # Remove local tags since they are no longer necessary
782 799 tagname = b'D%d' % drevid
783 800 if tagname in repo.tags():
784 801 tags.tag(
785 802 repo,
786 803 tagname,
787 804 nullid,
788 805 message=None,
789 806 user=None,
790 807 date=None,
791 808 local=True,
792 809 )
793 810 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
794 811 if wnode in mapping:
795 812 unfi.setparents(mapping[wnode][0])
796 813
797 814
798 815 # Map from "hg:meta" keys to headers understood by "hg import". The order is
799 816 # consistent with "hg export" output.
800 817 _metanamemap = util.sortdict(
801 818 [
802 819 (b'user', b'User'),
803 820 (b'date', b'Date'),
804 821 (b'branch', b'Branch'),
805 822 (b'node', b'Node ID'),
806 823 (b'parent', b'Parent '),
807 824 ]
808 825 )
809 826
810 827
811 828 def _confirmbeforesend(repo, revs, oldmap):
812 829 url, token = readurltoken(repo.ui)
813 830 ui = repo.ui
814 831 for rev in revs:
815 832 ctx = repo[rev]
816 833 desc = ctx.description().splitlines()[0]
817 834 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
818 835 if drevid:
819 836 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
820 837 else:
821 838 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
822 839
823 840 ui.write(
824 841 _(b'%s - %s: %s\n')
825 842 % (
826 843 drevdesc,
827 844 ui.label(bytes(ctx), b'phabricator.node'),
828 845 ui.label(desc, b'phabricator.desc'),
829 846 )
830 847 )
831 848
832 849 if ui.promptchoice(
833 850 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
834 851 ):
835 852 return False
836 853
837 854 return True
838 855
839 856
840 857 _knownstatusnames = {
841 858 b'accepted',
842 859 b'needsreview',
843 860 b'needsrevision',
844 861 b'closed',
845 862 b'abandoned',
846 863 }
847 864
848 865
849 866 def _getstatusname(drev):
850 867 """get normalized status name from a Differential Revision"""
851 868 return drev[b'statusName'].replace(b' ', b'').lower()
852 869
853 870
854 871 # A small language to specify Differential Revisions. Supported symbols: (), :X,
855 872 # +, and -.
856 873
857 874 _elements = {
858 875 # token-type: binding-strength, primary, prefix, infix, suffix
859 876 b'(': (12, None, (b'group', 1, b')'), None, None),
860 877 b':': (8, None, (b'ancestors', 8), None, None),
861 878 b'&': (5, None, None, (b'and_', 5), None),
862 879 b'+': (4, None, None, (b'add', 4), None),
863 880 b'-': (4, None, None, (b'sub', 4), None),
864 881 b')': (0, None, None, None, None),
865 882 b'symbol': (0, b'symbol', None, None, None),
866 883 b'end': (0, None, None, None, None),
867 884 }
868 885
869 886
870 887 def _tokenize(text):
871 888 view = memoryview(text) # zero-copy slice
872 889 special = b'():+-& '
873 890 pos = 0
874 891 length = len(text)
875 892 while pos < length:
876 893 symbol = b''.join(
877 894 itertools.takewhile(
878 895 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
879 896 )
880 897 )
881 898 if symbol:
882 899 yield (b'symbol', symbol, pos)
883 900 pos += len(symbol)
884 901 else: # special char, ignore space
885 902 if text[pos] != b' ':
886 903 yield (text[pos], None, pos)
887 904 pos += 1
888 905 yield (b'end', None, pos)
889 906
890 907
891 908 def _parse(text):
892 909 tree, pos = parser.parser(_elements).parse(_tokenize(text))
893 910 if pos != len(text):
894 911 raise error.ParseError(b'invalid token', pos)
895 912 return tree
896 913
897 914
898 915 def _parsedrev(symbol):
899 916 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
900 917 if symbol.startswith(b'D') and symbol[1:].isdigit():
901 918 return int(symbol[1:])
902 919 if symbol.isdigit():
903 920 return int(symbol)
904 921
905 922
906 923 def _prefetchdrevs(tree):
907 924 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
908 925 drevs = set()
909 926 ancestordrevs = set()
910 927 op = tree[0]
911 928 if op == b'symbol':
912 929 r = _parsedrev(tree[1])
913 930 if r:
914 931 drevs.add(r)
915 932 elif op == b'ancestors':
916 933 r, a = _prefetchdrevs(tree[1])
917 934 drevs.update(r)
918 935 ancestordrevs.update(r)
919 936 ancestordrevs.update(a)
920 937 else:
921 938 for t in tree[1:]:
922 939 r, a = _prefetchdrevs(t)
923 940 drevs.update(r)
924 941 ancestordrevs.update(a)
925 942 return drevs, ancestordrevs
926 943
927 944
928 945 def querydrev(repo, spec):
929 946 """return a list of "Differential Revision" dicts
930 947
931 948 spec is a string using a simple query language, see docstring in phabread
932 949 for details.
933 950
934 951 A "Differential Revision dict" looks like:
935 952
936 953 {
937 954 "id": "2",
938 955 "phid": "PHID-DREV-672qvysjcczopag46qty",
939 956 "title": "example",
940 957 "uri": "https://phab.example.com/D2",
941 958 "dateCreated": "1499181406",
942 959 "dateModified": "1499182103",
943 960 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
944 961 "status": "0",
945 962 "statusName": "Needs Review",
946 963 "properties": [],
947 964 "branch": null,
948 965 "summary": "",
949 966 "testPlan": "",
950 967 "lineCount": "2",
951 968 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
952 969 "diffs": [
953 970 "3",
954 971 "4",
955 972 ],
956 973 "commits": [],
957 974 "reviewers": [],
958 975 "ccs": [],
959 976 "hashes": [],
960 977 "auxiliary": {
961 978 "phabricator:projects": [],
962 979 "phabricator:depends-on": [
963 980 "PHID-DREV-gbapp366kutjebt7agcd"
964 981 ]
965 982 },
966 983 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
967 984 "sourcePath": null
968 985 }
969 986 """
970 987
971 988 def fetch(params):
972 989 """params -> single drev or None"""
973 990 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
974 991 if key in prefetched:
975 992 return prefetched[key]
976 993 drevs = callconduit(repo.ui, b'differential.query', params)
977 994 # Fill prefetched with the result
978 995 for drev in drevs:
979 996 prefetched[drev[b'phid']] = drev
980 997 prefetched[int(drev[b'id'])] = drev
981 998 if key not in prefetched:
982 999 raise error.Abort(
983 1000 _(b'cannot get Differential Revision %r') % params
984 1001 )
985 1002 return prefetched[key]
986 1003
987 1004 def getstack(topdrevids):
988 1005 """given a top, get a stack from the bottom, [id] -> [id]"""
989 1006 visited = set()
990 1007 result = []
991 1008 queue = [{b'ids': [i]} for i in topdrevids]
992 1009 while queue:
993 1010 params = queue.pop()
994 1011 drev = fetch(params)
995 1012 if drev[b'id'] in visited:
996 1013 continue
997 1014 visited.add(drev[b'id'])
998 1015 result.append(int(drev[b'id']))
999 1016 auxiliary = drev.get(b'auxiliary', {})
1000 1017 depends = auxiliary.get(b'phabricator:depends-on', [])
1001 1018 for phid in depends:
1002 1019 queue.append({b'phids': [phid]})
1003 1020 result.reverse()
1004 1021 return smartset.baseset(result)
1005 1022
1006 1023 # Initialize prefetch cache
1007 1024 prefetched = {} # {id or phid: drev}
1008 1025
1009 1026 tree = _parse(spec)
1010 1027 drevs, ancestordrevs = _prefetchdrevs(tree)
1011 1028
1012 1029 # developer config: phabricator.batchsize
1013 1030 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1014 1031
1015 1032 # Prefetch Differential Revisions in batch
1016 1033 tofetch = set(drevs)
1017 1034 for r in ancestordrevs:
1018 1035 tofetch.update(range(max(1, r - batchsize), r + 1))
1019 1036 if drevs:
1020 1037 fetch({b'ids': list(tofetch)})
1021 1038 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1022 1039
1023 1040 # Walk through the tree, return smartsets
1024 1041 def walk(tree):
1025 1042 op = tree[0]
1026 1043 if op == b'symbol':
1027 1044 drev = _parsedrev(tree[1])
1028 1045 if drev:
1029 1046 return smartset.baseset([drev])
1030 1047 elif tree[1] in _knownstatusnames:
1031 1048 drevs = [
1032 1049 r
1033 1050 for r in validids
1034 1051 if _getstatusname(prefetched[r]) == tree[1]
1035 1052 ]
1036 1053 return smartset.baseset(drevs)
1037 1054 else:
1038 1055 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1039 1056 elif op in {b'and_', b'add', b'sub'}:
1040 1057 assert len(tree) == 3
1041 1058 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1042 1059 elif op == b'group':
1043 1060 return walk(tree[1])
1044 1061 elif op == b'ancestors':
1045 1062 return getstack(walk(tree[1]))
1046 1063 else:
1047 1064 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1048 1065
1049 1066 return [prefetched[r] for r in walk(tree)]
1050 1067
1051 1068
1052 1069 def getdescfromdrev(drev):
1053 1070 """get description (commit message) from "Differential Revision"
1054 1071
1055 1072 This is similar to the differential.getcommitmessage API, but we only care
1056 1073 about a limited set of fields: title, summary, test plan, and URL.
1057 1074 """
1058 1075 title = drev[b'title']
1059 1076 summary = drev[b'summary'].rstrip()
1060 1077 testplan = drev[b'testPlan'].rstrip()
1061 1078 if testplan:
1062 1079 testplan = b'Test Plan:\n%s' % testplan
1063 1080 uri = b'Differential Revision: %s' % drev[b'uri']
1064 1081 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1065 1082
1066 1083
1067 1084 def getdiffmeta(diff):
1068 1085 """get commit metadata (date, node, user, p1) from a diff object
1069 1086
1070 1087 The metadata could be "hg:meta", sent by phabsend, like:
1071 1088
1072 1089 "properties": {
1073 1090 "hg:meta": {
1074 1091 "date": "1499571514 25200",
1075 1092 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1076 1093 "user": "Foo Bar <foo@example.com>",
1077 1094 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1078 1095 }
1079 1096 }
1080 1097
1081 1098 Or converted from "local:commits", sent by "arc", like:
1082 1099
1083 1100 "properties": {
1084 1101 "local:commits": {
1085 1102 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1086 1103 "author": "Foo Bar",
1087 1104 "time": 1499546314,
1088 1105 "branch": "default",
1089 1106 "tag": "",
1090 1107 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1091 1108 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1092 1109 "local": "1000",
1093 1110 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1094 1111 "summary": "...",
1095 1112 "message": "...",
1096 1113 "authorEmail": "foo@example.com"
1097 1114 }
1098 1115 }
1099 1116 }
1100 1117
1101 1118 Note: metadata extracted from "local:commits" will lose time zone
1102 1119 information.
1103 1120 """
1104 1121 props = diff.get(b'properties') or {}
1105 1122 meta = props.get(b'hg:meta')
1106 1123 if not meta:
1107 1124 if props.get(b'local:commits'):
1108 1125 commit = sorted(props[b'local:commits'].values())[0]
1109 1126 meta = {}
1110 1127 if b'author' in commit and b'authorEmail' in commit:
1111 1128 meta[b'user'] = b'%s <%s>' % (
1112 1129 commit[b'author'],
1113 1130 commit[b'authorEmail'],
1114 1131 )
1115 1132 if b'time' in commit:
1116 1133 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1117 1134 if b'branch' in commit:
1118 1135 meta[b'branch'] = commit[b'branch']
1119 1136 node = commit.get(b'commit', commit.get(b'rev'))
1120 1137 if node:
1121 1138 meta[b'node'] = node
1122 1139 if len(commit.get(b'parents', ())) >= 1:
1123 1140 meta[b'parent'] = commit[b'parents'][0]
1124 1141 else:
1125 1142 meta = {}
1126 1143 if b'date' not in meta and b'dateCreated' in diff:
1127 1144 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1128 1145 if b'branch' not in meta and diff.get(b'branch'):
1129 1146 meta[b'branch'] = diff[b'branch']
1130 1147 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1131 1148 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1132 1149 return meta
1133 1150
1134 1151
1135 1152 def readpatch(repo, drevs, write):
1136 1153 """generate plain-text patch readable by 'hg import'
1137 1154
1138 1155 write is usually ui.write. drevs is what "querydrev" returns, results of
1139 1156 "differential.query".
1140 1157 """
1141 1158 # Prefetch hg:meta property for all diffs
1142 1159 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1143 1160 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1144 1161
1145 1162 # Generate patch for each drev
1146 1163 for drev in drevs:
1147 1164 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1148 1165
1149 1166 diffid = max(int(v) for v in drev[b'diffs'])
1150 1167 body = callconduit(
1151 1168 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1152 1169 )
1153 1170 desc = getdescfromdrev(drev)
1154 1171 header = b'# HG changeset patch\n'
1155 1172
1156 1173 # Try to preserve metadata from hg:meta property. Write hg patch
1157 1174 # headers that can be read by the "import" command. See patchheadermap
1158 1175 # and extract in mercurial/patch.py for supported headers.
1159 1176 meta = getdiffmeta(diffs[b'%d' % diffid])
1160 1177 for k in _metanamemap.keys():
1161 1178 if k in meta:
1162 1179 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1163 1180
1164 1181 content = b'%s%s\n%s' % (header, desc, body)
1165 1182 write(content)
1166 1183
1167 1184
1168 1185 @vcrcommand(
1169 1186 b'phabread',
1170 1187 [(b'', b'stack', False, _(b'read dependencies'))],
1171 1188 _(b'DREVSPEC [OPTIONS]'),
1172 1189 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1173 1190 )
1174 1191 def phabread(ui, repo, spec, **opts):
1175 1192 """print patches from Phabricator suitable for importing
1176 1193
1177 1194 DREVSPEC can be a Differential Revision identifier, like ``D123``, or just
1178 1195 the number ``123``. It can also use common operators like ``+``, ``-``,
1179 1196 ``&``, ``(``, ``)`` for complex queries. The ``:`` prefix can be used to
1180 1197 select a stack.
1181 1198
1182 1199 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1183 1200 can be used to filter patches by status. For performance reasons, they
1184 1201 only represent a subset of the non-status selections and cannot be used alone.
1185 1202
1186 1203 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1187 1204 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1188 1205 stack up to D9.
1189 1206
1190 1207 If --stack is given, follow dependency information and read all patches.
1191 1208 It is equivalent to the ``:`` operator.
1192 1209 """
1193 1210 opts = pycompat.byteskwargs(opts)
1194 1211 if opts.get(b'stack'):
1195 1212 spec = b':(%s)' % spec
1196 1213 drevs = querydrev(repo, spec)
1197 1214 readpatch(repo, drevs, ui.write)
1198 1215
1199 1216
1200 1217 @vcrcommand(
1201 1218 b'phabupdate',
1202 1219 [
1203 1220 (b'', b'accept', False, _(b'accept revisions')),
1204 1221 (b'', b'reject', False, _(b'reject revisions')),
1205 1222 (b'', b'abandon', False, _(b'abandon revisions')),
1206 1223 (b'', b'reclaim', False, _(b'reclaim revisions')),
1207 1224 (b'm', b'comment', b'', _(b'comment on the last revision')),
1208 1225 ],
1209 1226 _(b'DREVSPEC [OPTIONS]'),
1210 1227 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1211 1228 )
1212 1229 def phabupdate(ui, repo, spec, **opts):
1213 1230 """update Differential Revision in batch
1214 1231
1215 1232 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1216 1233 """
1217 1234 opts = pycompat.byteskwargs(opts)
1218 1235 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1219 1236 if len(flags) > 1:
1220 1237 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1221 1238
1222 1239 actions = []
1223 1240 for f in flags:
1224 1241 actions.append({b'type': f, b'value': b'true'})
1225 1242
1226 1243 drevs = querydrev(repo, spec)
1227 1244 for i, drev in enumerate(drevs):
1228 1245 if i + 1 == len(drevs) and opts.get(b'comment'):
1229 1246 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1230 1247 if actions:
1231 1248 params = {
1232 1249 b'objectIdentifier': drev[b'phid'],
1233 1250 b'transactions': actions,
1234 1251 }
1235 1252 callconduit(ui, b'differential.revision.edit', params)
1236 1253
1237 1254
1238 1255 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1239 1256 def template_review(context, mapping):
1240 1257 """:phabreview: Object describing the review for this changeset.
1241 1258 Has attributes `url` and `id`.
1242 1259 """
1243 1260 ctx = context.resource(mapping, b'ctx')
1244 1261 m = _differentialrevisiondescre.search(ctx.description())
1245 1262 if m:
1246 1263 return templateutil.hybriddict(
1247 1264 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1248 1265 )
1249 1266 else:
1250 1267 tags = ctx.repo().nodetags(ctx.node())
1251 1268 for t in tags:
1252 1269 if _differentialrevisiontagre.match(t):
1253 1270 url = ctx.repo().ui.config(b'phabricator', b'url')
1254 1271 if not url.endswith(b'/'):
1255 1272 url += b'/'
1256 1273 url += t
1257 1274
1258 1275 return templateutil.hybriddict({b'url': url, b'id': t,})
1259 1276 return None
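A closing note on the constants added in this revision: DiffFileType distinguishes text, image, and binary entries in Phabricator's per-file diff metadata. Below is a minimal, hypothetical sketch of how a caller might choose one; stringutil.binary is an existing Mercurial helper, but the filetypefor function itself is illustrative and not part of this changeset::

    from mercurial.utils import stringutil

    # Hypothetical helper (not part of this changeset): classify raw file
    # contents for a Phabricator diff entry.
    def filetypefor(data):
        if stringutil.binary(data):    # true if the bytes contain a NUL byte
            return DiffFileType.BINARY
        return DiffFileType.TEXT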