phabricator: add the phabhunk data structure...
Ian Moody -
r43453:73d4bc60 default
@@ -1,1276 +1,1292 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use the builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import contextlib
45 45 import itertools
46 46 import json
47 47 import operator
48 48 import re
49 49
50 50 from mercurial.node import bin, nullid
51 51 from mercurial.i18n import _
52 52 from mercurial.pycompat import getattr
53 from mercurial.thirdparty import attr
53 54 from mercurial import (
54 55 cmdutil,
55 56 context,
56 57 encoding,
57 58 error,
58 59 exthelper,
59 60 httpconnection as httpconnectionmod,
60 61 mdiff,
61 62 obsutil,
62 63 parser,
63 64 patch,
64 65 phases,
65 66 pycompat,
66 67 scmutil,
67 68 smartset,
68 69 tags,
69 70 templatefilters,
70 71 templateutil,
71 72 url as urlmod,
72 73 util,
73 74 )
74 75 from mercurial.utils import (
75 76 procutil,
76 77 stringutil,
77 78 )
78 79
79 80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
80 81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 82 # be specifying the version(s) of Mercurial they are tested with, or
82 83 # leave the attribute unspecified.
83 84 testedwith = b'ships-with-hg-core'
84 85
85 86 eh = exthelper.exthelper()
86 87
87 88 cmdtable = eh.cmdtable
88 89 command = eh.command
89 90 configtable = eh.configtable
90 91 templatekeyword = eh.templatekeyword
91 92
92 93 # developer config: phabricator.batchsize
93 94 eh.configitem(
94 95 b'phabricator', b'batchsize', default=12,
95 96 )
96 97 eh.configitem(
97 98 b'phabricator', b'callsign', default=None,
98 99 )
99 100 eh.configitem(
100 101 b'phabricator', b'curlcmd', default=None,
101 102 )
102 103 # developer config: phabricator.repophid
103 104 eh.configitem(
104 105 b'phabricator', b'repophid', default=None,
105 106 )
106 107 eh.configitem(
107 108 b'phabricator', b'url', default=None,
108 109 )
109 110 eh.configitem(
110 111 b'phabsend', b'confirm', default=False,
111 112 )
112 113
113 114 colortable = {
114 115 b'phabricator.action.created': b'green',
115 116 b'phabricator.action.skipped': b'magenta',
116 117 b'phabricator.action.updated': b'magenta',
117 118 b'phabricator.desc': b'',
118 119 b'phabricator.drev': b'bold',
119 120 b'phabricator.node': b'',
120 121 }
121 122
122 123 _VCR_FLAGS = [
123 124 (
124 125 b'',
125 126 b'test-vcr',
126 127 b'',
127 128 _(
128 129 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
129 130 b', otherwise will mock all http requests using the specified vcr file.'
130 131 b' (ADVANCED)'
131 132 ),
132 133 ),
133 134 ]
134 135
135 136
136 137 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
137 138 fullflags = flags + _VCR_FLAGS
138 139
139 140 def hgmatcher(r1, r2):
140 141 if r1.uri != r2.uri or r1.method != r2.method:
141 142 return False
142 143 r1params = r1.body.split(b'&')
143 144 r2params = r2.body.split(b'&')
144 145 return set(r1params) == set(r2params)
145 146
146 147 def sanitiserequest(request):
147 148 request.body = re.sub(
148 149 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
149 150 )
150 151 return request
151 152
152 153 def sanitiseresponse(response):
153 154 if r'set-cookie' in response[r'headers']:
154 155 del response[r'headers'][r'set-cookie']
155 156 return response
156 157
157 158 def decorate(fn):
158 159 def inner(*args, **kwargs):
159 160 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
160 161 if cassette:
161 162 import hgdemandimport
162 163
163 164 with hgdemandimport.deactivated():
164 165 import vcr as vcrmod
165 166 import vcr.stubs as stubs
166 167
167 168 vcr = vcrmod.VCR(
168 169 serializer=r'json',
169 170 before_record_request=sanitiserequest,
170 171 before_record_response=sanitiseresponse,
171 172 custom_patches=[
172 173 (
173 174 urlmod,
174 175 r'httpconnection',
175 176 stubs.VCRHTTPConnection,
176 177 ),
177 178 (
178 179 urlmod,
179 180 r'httpsconnection',
180 181 stubs.VCRHTTPSConnection,
181 182 ),
182 183 ],
183 184 )
184 185 vcr.register_matcher(r'hgmatcher', hgmatcher)
185 186 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
186 187 return fn(*args, **kwargs)
187 188 return fn(*args, **kwargs)
188 189
189 190 inner.__name__ = fn.__name__
190 191 inner.__doc__ = fn.__doc__
191 192 return command(
192 193 name,
193 194 fullflags,
194 195 spec,
195 196 helpcategory=helpcategory,
196 197 optionalrepo=optionalrepo,
197 198 )(inner)
198 199
199 200 return decorate
200 201
201 202
202 203 def urlencodenested(params):
203 204 """like urlencode, but works with nested parameters.
204 205
205 206 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
206 207 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
207 208 urlencode. Note: the encoding is consistent with PHP's http_build_query.
208 209 """
209 210 flatparams = util.sortdict()
210 211
211 212 def process(prefix, obj):
212 213 if isinstance(obj, bool):
213 214 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
214 215 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
215 216 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
216 217 if items is None:
217 218 flatparams[prefix] = obj
218 219 else:
219 220 for k, v in items(obj):
220 221 if prefix:
221 222 process(b'%s[%s]' % (prefix, k), v)
222 223 else:
223 224 process(k, v)
224 225
225 226 process(b'', params)
226 227 return util.urlreq.urlencode(flatparams)
227 228
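As a rough standalone illustration of the flattening described in the docstring above (not part of this patch; the helper name and the plain-str simplification are mine), the same PHP-style encoding can be sketched with only the standard library:

    import urllib.parse

    def flatten(prefix, obj, out):
        # dicts and lists recurse using PHP-style bracketed keys
        if isinstance(obj, dict):
            for k, v in obj.items():
                flatten('%s[%s]' % (prefix, k) if prefix else k, v, out)
        elif isinstance(obj, list):
            for i, v in enumerate(obj):
                flatten('%s[%d]' % (prefix, i), v, out)
        else:
            out.append((prefix, obj))

    flat = []
    flatten('', {'a': ['b', 'c'], 'd': {'e': 'f'}}, flat)
    # brackets get percent-encoded: a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f
    print(urllib.parse.urlencode(flat))

The real helper above additionally handles bytes keys and PHP-style booleans.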
228 229
229 230 def readurltoken(ui):
230 231 """return conduit url, token and make sure they exist
231 232
232 233 Currently read from [auth] config section. In the future, it might
233 234 make sense to read from .arcconfig and .arcrc as well.
234 235 """
235 236 url = ui.config(b'phabricator', b'url')
236 237 if not url:
237 238 raise error.Abort(
238 239 _(b'config %s.%s is required') % (b'phabricator', b'url')
239 240 )
240 241
241 242 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
242 243 token = None
243 244
244 245 if res:
245 246 group, auth = res
246 247
247 248 ui.debug(b"using auth.%s.* for authentication\n" % group)
248 249
249 250 token = auth.get(b'phabtoken')
250 251
251 252 if not token:
252 253 raise error.Abort(
253 254 _(b'Can\'t find conduit token associated to %s') % (url,)
254 255 )
255 256
256 257 return url, token
257 258
258 259
259 260 def callconduit(ui, name, params):
260 261 """call Conduit API, params is a dict. return json.loads result, or None"""
261 262 host, token = readurltoken(ui)
262 263 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
263 264 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
264 265 params = params.copy()
265 266 params[b'api.token'] = token
266 267 data = urlencodenested(params)
267 268 curlcmd = ui.config(b'phabricator', b'curlcmd')
268 269 if curlcmd:
269 270 sin, sout = procutil.popen2(
270 271 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
271 272 )
272 273 sin.write(data)
273 274 sin.close()
274 275 body = sout.read()
275 276 else:
276 277 urlopener = urlmod.opener(ui, authinfo)
277 278 request = util.urlreq.request(pycompat.strurl(url), data=data)
278 279 with contextlib.closing(urlopener.open(request)) as rsp:
279 280 body = rsp.read()
280 281 ui.debug(b'Conduit Response: %s\n' % body)
281 282 parsed = pycompat.rapply(
282 283 lambda x: encoding.unitolocal(x)
283 284 if isinstance(x, pycompat.unicode)
284 285 else x,
285 286 # json.loads only accepts bytes from py3.6+
286 287 json.loads(encoding.unifromlocal(body)),
287 288 )
288 289 if parsed.get(b'error_code'):
289 290 msg = _(b'Conduit Error (%s): %s') % (
290 291 parsed[b'error_code'],
291 292 parsed[b'error_info'],
292 293 )
293 294 raise error.Abort(msg)
294 295 return parsed[b'result']
295 296
296 297
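For orientation, the exchange callconduit performs is a plain HTTPS POST of form-encoded parameters (including ``api.token``) to ``<url>/api/<method>``, answered by a JSON object carrying either ``result`` or ``error_code``/``error_info``. A minimal sketch using only the standard library, assuming a hypothetical host and token and params that are already flat (the extension itself goes through urlmod or the configured curl command, as shown above):

    import json
    import urllib.parse
    import urllib.request

    def conduit(host, token, method, params):
        data = dict(params, **{'api.token': token})  # hypothetical token value
        body = urllib.parse.urlencode(data).encode('ascii')
        url = '%s/api/%s' % (host.rstrip('/'), method)
        req = urllib.request.Request(url, data=body)
        with urllib.request.urlopen(req) as rsp:
            parsed = json.loads(rsp.read())
        if parsed.get('error_code'):
            raise RuntimeError('Conduit Error (%s): %s'
                               % (parsed['error_code'], parsed['error_info']))
        return parsed['result']

    # conduit('https://phab.example.com', 'cli-xxxx',
    #         'differential.query', {'ids[0]': 123})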
297 298 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
298 299 def debugcallconduit(ui, repo, name):
299 300 """call Conduit API
300 301
301 302 Call parameters are read from stdin as a JSON blob. Result will be written
302 303 to stdout as a JSON blob.
303 304 """
304 305 # json.loads only accepts bytes from 3.6+
305 306 rawparams = encoding.unifromlocal(ui.fin.read())
306 307 # json.loads only returns unicode strings
307 308 params = pycompat.rapply(
308 309 lambda x: encoding.unitolocal(x)
309 310 if isinstance(x, pycompat.unicode)
310 311 else x,
311 312 json.loads(rawparams),
312 313 )
313 314 # json.dumps only accepts unicode strings
314 315 result = pycompat.rapply(
315 316 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
316 317 callconduit(ui, name, params),
317 318 )
318 319 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
319 320 ui.write(b'%s\n' % encoding.unitolocal(s))
320 321
321 322
322 323 def getrepophid(repo):
323 324 """given callsign, return repository PHID or None"""
324 325 # developer config: phabricator.repophid
325 326 repophid = repo.ui.config(b'phabricator', b'repophid')
326 327 if repophid:
327 328 return repophid
328 329 callsign = repo.ui.config(b'phabricator', b'callsign')
329 330 if not callsign:
330 331 return None
331 332 query = callconduit(
332 333 repo.ui,
333 334 b'diffusion.repository.search',
334 335 {b'constraints': {b'callsigns': [callsign]}},
335 336 )
336 337 if len(query[b'data']) == 0:
337 338 return None
338 339 repophid = query[b'data'][0][b'phid']
339 340 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
340 341 return repophid
341 342
342 343
343 344 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 345 _differentialrevisiondescre = re.compile(
345 346 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 347 )
347 348
348 349
349 350 def getoldnodedrevmap(repo, nodelist):
350 351 """find previous nodes that has been sent to Phabricator
351 352
352 353 return {node: (oldnode, Differential diff, Differential Revision ID)}
353 354 for node in nodelist with known previously sent versions, or associated
354 355 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
355 356 be ``None``.
356 357
357 358 Examines commit messages like "Differential Revision:" to get the
358 359 association information.
359 360
360 361 If such a commit message line is not found, examine all precursors and their
361 362 tags. Tags in the form "D1234" are considered a match, and the node with that
362 363 tag and the number after "D" (e.g. 1234) will be returned.
363 364
364 365 The ``old node``, if not None, is guaranteed to be the last diff of the
365 366 corresponding Differential Revision, and to exist in the repo.
366 367 """
367 368 unfi = repo.unfiltered()
368 369 nodemap = unfi.changelog.nodemap
369 370
370 371 result = {} # {node: (oldnode?, lastdiff?, drev)}
371 372 toconfirm = {} # {node: (force, {precnode}, drev)}
372 373 for node in nodelist:
373 374 ctx = unfi[node]
374 375 # For tags like "D123", put them into "toconfirm" to verify later
375 376 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
376 377 for n in precnodes:
377 378 if n in nodemap:
378 379 for tag in unfi.nodetags(n):
379 380 m = _differentialrevisiontagre.match(tag)
380 381 if m:
381 382 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
382 383 continue
383 384
384 385 # Check commit message
385 386 m = _differentialrevisiondescre.search(ctx.description())
386 387 if m:
387 388 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
388 389
389 390 # Double check if tags are genuine by collecting all old nodes from
390 391 # Phabricator, and expect the precursors to overlap with them.
391 392 if toconfirm:
392 393 drevs = [drev for force, precs, drev in toconfirm.values()]
393 394 alldiffs = callconduit(
394 395 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
395 396 )
396 397 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
397 398 for newnode, (force, precset, drev) in toconfirm.items():
398 399 diffs = [
399 400 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
400 401 ]
401 402
402 403 # "precursors" as known by Phabricator
403 404 phprecset = set(getnode(d) for d in diffs)
404 405
405 406 # Ignore if precursors (Phabricator and local repo) do not overlap,
406 407 # and force is not set (when commit message says nothing)
407 408 if not force and not bool(phprecset & precset):
408 409 tagname = b'D%d' % drev
409 410 tags.tag(
410 411 repo,
411 412 tagname,
412 413 nullid,
413 414 message=None,
414 415 user=None,
415 416 date=None,
416 417 local=True,
417 418 )
418 419 unfi.ui.warn(
419 420 _(
420 421 b'D%s: local tag removed - does not match '
421 422 b'Differential history\n'
422 423 )
423 424 % drev
424 425 )
425 426 continue
426 427
427 428 # Find the last node using Phabricator metadata, and make sure it
428 429 # exists in the repo
429 430 oldnode = lastdiff = None
430 431 if diffs:
431 432 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
432 433 oldnode = getnode(lastdiff)
433 434 if oldnode and oldnode not in nodemap:
434 435 oldnode = None
435 436
436 437 result[newnode] = (oldnode, lastdiff, drev)
437 438
438 439 return result
439 440
440 441
441 442 def getdiff(ctx, diffopts):
442 443 """plain-text diff without header (user, commit message, etc)"""
443 444 output = util.stringio()
444 445 for chunk, _label in patch.diffui(
445 446 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
446 447 ):
447 448 output.write(chunk)
448 449 return output.getvalue()
449 450
450 451
451 452 class DiffChangeType(object):
452 453 ADD = 1
453 454 CHANGE = 2
454 455 DELETE = 3
455 456 MOVE_AWAY = 4
456 457 COPY_AWAY = 5
457 458 MOVE_HERE = 6
458 459 COPY_HERE = 7
459 460 MULTICOPY = 8
460 461
461 462
462 463 class DiffFileType(object):
463 464 TEXT = 1
464 465 IMAGE = 2
465 466 BINARY = 3
466 467
467 468
469 @attr.s
470 class phabhunk(dict):
471 """Represents a Differential hunk, which is owned by a Differential change
472 """
473
474 oldOffset = attr.ib(default=0) # camelcase-required
475 oldLength = attr.ib(default=0) # camelcase-required
476 newOffset = attr.ib(default=0) # camelcase-required
477 newLength = attr.ib(default=0) # camelcase-required
478 corpus = attr.ib(default='')
479 # These get added to the phabchange's equivalents
480 addLines = attr.ib(default=0) # camelcase-required
481 delLines = attr.ib(default=0) # camelcase-required
482
483
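To make the new data structure concrete, here is a hedged sketch (not part of this patch) of filling in a phabhunk by hand and flattening it with attr.asdict; the corpus format shown (unified-diff body lines) and how later patches in this series actually build and upload hunks are assumptions on my part, though the camelCase field names match what the "# camelcase-required" comments above imply the Differential API expects:

    from mercurial.thirdparty import attr

    # One old line removed and two new lines added after a context line,
    # i.e. old lines 10-11 become new lines 10-12.
    hunk = phabhunk(
        oldOffset=10,
        oldLength=2,
        newOffset=10,
        newLength=3,
        corpus=b' context line\n-removed line\n+added line one\n+added line two\n',
        addLines=2,
        delLines=1,
    )
    # attrs can turn the instance into a plain dict of its fields.
    print(attr.asdict(hunk))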
468 484 def creatediff(ctx):
469 485 """create a Differential Diff"""
470 486 repo = ctx.repo()
471 487 repophid = getrepophid(repo)
472 488 # Create a "Differential Diff" via "differential.createrawdiff" API
473 489 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
474 490 if repophid:
475 491 params[b'repositoryPHID'] = repophid
476 492 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
477 493 if not diff:
478 494 raise error.Abort(_(b'cannot create diff for %s') % ctx)
479 495 return diff
480 496
481 497
482 498 def writediffproperties(ctx, diff):
483 499 """write metadata to diff so patches could be applied losslessly"""
484 500 params = {
485 501 b'diff_id': diff[b'id'],
486 502 b'name': b'hg:meta',
487 503 b'data': templatefilters.json(
488 504 {
489 505 b'user': ctx.user(),
490 506 b'date': b'%d %d' % ctx.date(),
491 507 b'branch': ctx.branch(),
492 508 b'node': ctx.hex(),
493 509 b'parent': ctx.p1().hex(),
494 510 }
495 511 ),
496 512 }
497 513 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
498 514
499 515 params = {
500 516 b'diff_id': diff[b'id'],
501 517 b'name': b'local:commits',
502 518 b'data': templatefilters.json(
503 519 {
504 520 ctx.hex(): {
505 521 b'author': stringutil.person(ctx.user()),
506 522 b'authorEmail': stringutil.email(ctx.user()),
507 523 b'time': int(ctx.date()[0]),
508 524 b'commit': ctx.hex(),
509 525 b'parents': [ctx.p1().hex()],
510 526 b'branch': ctx.branch(),
511 527 },
512 528 }
513 529 ),
514 530 }
515 531 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
516 532
517 533
518 534 def createdifferentialrevision(
519 535 ctx,
520 536 revid=None,
521 537 parentrevphid=None,
522 538 oldnode=None,
523 539 olddiff=None,
524 540 actions=None,
525 541 comment=None,
526 542 ):
527 543 """create or update a Differential Revision
528 544
529 545 If revid is None, create a new Differential Revision, otherwise update
530 546 revid. If parentrevphid is not None, set it as a dependency.
531 547
532 548 If oldnode is not None, check if the patch content (without commit message
533 549 and metadata) has changed before creating another diff.
534 550
535 551 If actions is not None, they will be appended to the transaction.
536 552 """
537 553 repo = ctx.repo()
538 554 if oldnode:
539 555 diffopts = mdiff.diffopts(git=True, context=32767)
540 556 oldctx = repo.unfiltered()[oldnode]
541 557 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
542 558 else:
543 559 neednewdiff = True
544 560
545 561 transactions = []
546 562 if neednewdiff:
547 563 diff = creatediff(ctx)
548 564 transactions.append({b'type': b'update', b'value': diff[b'phid']})
549 565 if comment:
550 566 transactions.append({b'type': b'comment', b'value': comment})
551 567 else:
552 568 # Even if we don't need to upload a new diff because the patch content
553 569 # has not changed, we might still need to update its metadata so
554 570 # pushers can know the correct node metadata.
555 571 assert olddiff
556 572 diff = olddiff
557 573 writediffproperties(ctx, diff)
558 574
559 575 # Set the parent Revision every time, so commit re-ordering is picked up
560 576 if parentrevphid:
561 577 transactions.append(
562 578 {b'type': b'parents.set', b'value': [parentrevphid]}
563 579 )
564 580
565 581 if actions:
566 582 transactions += actions
567 583
568 584 # Parse commit message and update related fields.
569 585 desc = ctx.description()
570 586 info = callconduit(
571 587 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
572 588 )
573 589 for k, v in info[b'fields'].items():
574 590 if k in [b'title', b'summary', b'testPlan']:
575 591 transactions.append({b'type': k, b'value': v})
576 592
577 593 params = {b'transactions': transactions}
578 594 if revid is not None:
579 595 # Update an existing Differential Revision
580 596 params[b'objectIdentifier'] = revid
581 597
582 598 revision = callconduit(repo.ui, b'differential.revision.edit', params)
583 599 if not revision:
584 600 raise error.Abort(_(b'cannot create revision for %s') % ctx)
585 601
586 602 return revision, diff
587 603
588 604
589 605 def userphids(repo, names):
590 606 """convert user names to PHIDs"""
591 607 names = [name.lower() for name in names]
592 608 query = {b'constraints': {b'usernames': names}}
593 609 result = callconduit(repo.ui, b'user.search', query)
594 610 # A username not being found is not an API error, so check whether we have
595 611 # missed some names here.
596 612 data = result[b'data']
597 613 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
598 614 unresolved = set(names) - resolved
599 615 if unresolved:
600 616 raise error.Abort(
601 617 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
602 618 )
603 619 return [entry[b'phid'] for entry in data]
604 620
605 621
606 622 @vcrcommand(
607 623 b'phabsend',
608 624 [
609 625 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
610 626 (b'', b'amend', True, _(b'update commit messages')),
611 627 (b'', b'reviewer', [], _(b'specify reviewers')),
612 628 (b'', b'blocker', [], _(b'specify blocking reviewers')),
613 629 (
614 630 b'm',
615 631 b'comment',
616 632 b'',
617 633 _(b'add a comment to Revisions with new/updated Diffs'),
618 634 ),
619 635 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
620 636 ],
621 637 _(b'REV [OPTIONS]'),
622 638 helpcategory=command.CATEGORY_IMPORT_EXPORT,
623 639 )
624 640 def phabsend(ui, repo, *revs, **opts):
625 641 """upload changesets to Phabricator
626 642
627 643 If multiple revisions are specified, they will be sent as a stack with
628 644 linear dependency relationships, using the order specified by the
629 645 revset.
630 646
631 647 When uploading changesets for the first time, local tags will be created
632 648 to maintain the association. After that, phabsend will check the obsstore
633 649 and tag information so it can figure out whether to update an existing
634 650 Differential Revision, or create a new one.
635 651
636 652 If --amend is set, update commit messages so they have the
637 653 ``Differential Revision`` URL and remove the related tags. This is similar
638 654 to what arcanist does, and is preferable in author-push workflows. Otherwise,
639 655 use local tags to record the ``Differential Revision`` association.
640 656
641 657 The --confirm option lets you confirm changesets before sending them. You
642 658 can also add the following to your configuration file to make it the
643 659 default behaviour::
644 660
645 661 [phabsend]
646 662 confirm = true
647 663
648 664 phabsend will check the obsstore and the above association to decide whether
649 665 to update an existing Differential Revision, or create a new one.
650 666 """
651 667 opts = pycompat.byteskwargs(opts)
652 668 revs = list(revs) + opts.get(b'rev', [])
653 669 revs = scmutil.revrange(repo, revs)
654 670
655 671 if not revs:
656 672 raise error.Abort(_(b'phabsend requires at least one changeset'))
657 673 if opts.get(b'amend'):
658 674 cmdutil.checkunfinished(repo)
659 675
660 676 # {newnode: (oldnode, olddiff, olddrev}
661 677 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
662 678
663 679 confirm = ui.configbool(b'phabsend', b'confirm')
664 680 confirm |= bool(opts.get(b'confirm'))
665 681 if confirm:
666 682 confirmed = _confirmbeforesend(repo, revs, oldmap)
667 683 if not confirmed:
668 684 raise error.Abort(_(b'phabsend cancelled'))
669 685
670 686 actions = []
671 687 reviewers = opts.get(b'reviewer', [])
672 688 blockers = opts.get(b'blocker', [])
673 689 phids = []
674 690 if reviewers:
675 691 phids.extend(userphids(repo, reviewers))
676 692 if blockers:
677 693 phids.extend(
678 694 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
679 695 )
680 696 if phids:
681 697 actions.append({b'type': b'reviewers.add', b'value': phids})
682 698
683 699 drevids = [] # [int]
684 700 diffmap = {} # {newnode: diff}
685 701
686 702 # Send patches one by one so we know their Differential Revision PHIDs and
687 703 # can provide dependency relationship
688 704 lastrevphid = None
689 705 for rev in revs:
690 706 ui.debug(b'sending rev %d\n' % rev)
691 707 ctx = repo[rev]
692 708
693 709 # Get Differential Revision ID
694 710 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
695 711 if oldnode != ctx.node() or opts.get(b'amend'):
696 712 # Create or update Differential Revision
697 713 revision, diff = createdifferentialrevision(
698 714 ctx,
699 715 revid,
700 716 lastrevphid,
701 717 oldnode,
702 718 olddiff,
703 719 actions,
704 720 opts.get(b'comment'),
705 721 )
706 722 diffmap[ctx.node()] = diff
707 723 newrevid = int(revision[b'object'][b'id'])
708 724 newrevphid = revision[b'object'][b'phid']
709 725 if revid:
710 726 action = b'updated'
711 727 else:
712 728 action = b'created'
713 729
714 730 # Create a local tag to note the association, if commit message
715 731 # does not have it already
716 732 m = _differentialrevisiondescre.search(ctx.description())
717 733 if not m or int(m.group(r'id')) != newrevid:
718 734 tagname = b'D%d' % newrevid
719 735 tags.tag(
720 736 repo,
721 737 tagname,
722 738 ctx.node(),
723 739 message=None,
724 740 user=None,
725 741 date=None,
726 742 local=True,
727 743 )
728 744 else:
729 745 # Nothing changed. But still set "newrevphid" so the next revision
730 746 # could depend on this one and "newrevid" for the summary line.
731 747 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
732 748 newrevid = revid
733 749 action = b'skipped'
734 750
735 751 actiondesc = ui.label(
736 752 {
737 753 b'created': _(b'created'),
738 754 b'skipped': _(b'skipped'),
739 755 b'updated': _(b'updated'),
740 756 }[action],
741 757 b'phabricator.action.%s' % action,
742 758 )
743 759 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
744 760 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
745 761 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
746 762 ui.write(
747 763 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
748 764 )
749 765 drevids.append(newrevid)
750 766 lastrevphid = newrevphid
751 767
752 768 # Update commit messages and remove tags
753 769 if opts.get(b'amend'):
754 770 unfi = repo.unfiltered()
755 771 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
756 772 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
757 773 wnode = unfi[b'.'].node()
758 774 mapping = {} # {oldnode: [newnode]}
759 775 for i, rev in enumerate(revs):
760 776 old = unfi[rev]
761 777 drevid = drevids[i]
762 778 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
763 779 newdesc = getdescfromdrev(drev)
764 780 # Make sure the commit message contains "Differential Revision"
765 781 if old.description() != newdesc:
766 782 if old.phase() == phases.public:
767 783 ui.warn(
768 784 _(b"warning: not updating public commit %s\n")
769 785 % scmutil.formatchangeid(old)
770 786 )
771 787 continue
772 788 parents = [
773 789 mapping.get(old.p1().node(), (old.p1(),))[0],
774 790 mapping.get(old.p2().node(), (old.p2(),))[0],
775 791 ]
776 792 new = context.metadataonlyctx(
777 793 repo,
778 794 old,
779 795 parents=parents,
780 796 text=newdesc,
781 797 user=old.user(),
782 798 date=old.date(),
783 799 extra=old.extra(),
784 800 )
785 801
786 802 newnode = new.commit()
787 803
788 804 mapping[old.node()] = [newnode]
789 805 # Update diff property
790 806 # If it fails just warn and keep going, otherwise the DREV
791 807 # associations will be lost
792 808 try:
793 809 writediffproperties(unfi[newnode], diffmap[old.node()])
794 810 except util.urlerr.urlerror:
795 811 ui.warnnoi18n(
796 812 b'Failed to update metadata for D%s\n' % drevid
797 813 )
798 814 # Remove the local tag since it's no longer necessary
799 815 tagname = b'D%d' % drevid
800 816 if tagname in repo.tags():
801 817 tags.tag(
802 818 repo,
803 819 tagname,
804 820 nullid,
805 821 message=None,
806 822 user=None,
807 823 date=None,
808 824 local=True,
809 825 )
810 826 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
811 827 if wnode in mapping:
812 828 unfi.setparents(mapping[wnode][0])
813 829
814 830
815 831 # Map from "hg:meta" keys to header understood by "hg import". The order is
816 832 # consistent with "hg export" output.
817 833 _metanamemap = util.sortdict(
818 834 [
819 835 (b'user', b'User'),
820 836 (b'date', b'Date'),
821 837 (b'branch', b'Branch'),
822 838 (b'node', b'Node ID'),
823 839 (b'parent', b'Parent '),
824 840 ]
825 841 )
826 842
827 843
828 844 def _confirmbeforesend(repo, revs, oldmap):
829 845 url, token = readurltoken(repo.ui)
830 846 ui = repo.ui
831 847 for rev in revs:
832 848 ctx = repo[rev]
833 849 desc = ctx.description().splitlines()[0]
834 850 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
835 851 if drevid:
836 852 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
837 853 else:
838 854 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
839 855
840 856 ui.write(
841 857 _(b'%s - %s: %s\n')
842 858 % (
843 859 drevdesc,
844 860 ui.label(bytes(ctx), b'phabricator.node'),
845 861 ui.label(desc, b'phabricator.desc'),
846 862 )
847 863 )
848 864
849 865 if ui.promptchoice(
850 866 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
851 867 ):
852 868 return False
853 869
854 870 return True
855 871
856 872
857 873 _knownstatusnames = {
858 874 b'accepted',
859 875 b'needsreview',
860 876 b'needsrevision',
861 877 b'closed',
862 878 b'abandoned',
863 879 }
864 880
865 881
866 882 def _getstatusname(drev):
867 883 """get normalized status name from a Differential Revision"""
868 884 return drev[b'statusName'].replace(b' ', b'').lower()
869 885
870 886
871 887 # Small language to specify differential revisions. Supported symbols: (), :X,
872 888 # +, and -.
873 889
874 890 _elements = {
875 891 # token-type: binding-strength, primary, prefix, infix, suffix
876 892 b'(': (12, None, (b'group', 1, b')'), None, None),
877 893 b':': (8, None, (b'ancestors', 8), None, None),
878 894 b'&': (5, None, None, (b'and_', 5), None),
879 895 b'+': (4, None, None, (b'add', 4), None),
880 896 b'-': (4, None, None, (b'sub', 4), None),
881 897 b')': (0, None, None, None, None),
882 898 b'symbol': (0, b'symbol', None, None, None),
883 899 b'end': (0, None, None, None, None),
884 900 }
885 901
886 902
887 903 def _tokenize(text):
888 904 view = memoryview(text) # zero-copy slice
889 905 special = b'():+-& '
890 906 pos = 0
891 907 length = len(text)
892 908 while pos < length:
893 909 symbol = b''.join(
894 910 itertools.takewhile(
895 911 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
896 912 )
897 913 )
898 914 if symbol:
899 915 yield (b'symbol', symbol, pos)
900 916 pos += len(symbol)
901 917 else: # special char, ignore space
902 918 if text[pos] != b' ':
903 919 yield (text[pos], None, pos)
904 920 pos += 1
905 921 yield (b'end', None, pos)
906 922
907 923
908 924 def _parse(text):
909 925 tree, pos = parser.parser(_elements).parse(_tokenize(text))
910 926 if pos != len(text):
911 927 raise error.ParseError(b'invalid token', pos)
912 928 return tree
913 929
914 930
915 931 def _parsedrev(symbol):
916 932 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
917 933 if symbol.startswith(b'D') and symbol[1:].isdigit():
918 934 return int(symbol[1:])
919 935 if symbol.isdigit():
920 936 return int(symbol)
921 937
922 938
923 939 def _prefetchdrevs(tree):
924 940 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
925 941 drevs = set()
926 942 ancestordrevs = set()
927 943 op = tree[0]
928 944 if op == b'symbol':
929 945 r = _parsedrev(tree[1])
930 946 if r:
931 947 drevs.add(r)
932 948 elif op == b'ancestors':
933 949 r, a = _prefetchdrevs(tree[1])
934 950 drevs.update(r)
935 951 ancestordrevs.update(r)
936 952 ancestordrevs.update(a)
937 953 else:
938 954 for t in tree[1:]:
939 955 r, a = _prefetchdrevs(t)
940 956 drevs.update(r)
941 957 ancestordrevs.update(a)
942 958 return drevs, ancestordrevs
943 959
944 960
945 961 def querydrev(repo, spec):
946 962 """return a list of "Differential Revision" dicts
947 963
948 964 spec is a string using a simple query language, see docstring in phabread
949 965 for details.
950 966
951 967 A "Differential Revision dict" looks like:
952 968
953 969 {
954 970 "id": "2",
955 971 "phid": "PHID-DREV-672qvysjcczopag46qty",
956 972 "title": "example",
957 973 "uri": "https://phab.example.com/D2",
958 974 "dateCreated": "1499181406",
959 975 "dateModified": "1499182103",
960 976 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
961 977 "status": "0",
962 978 "statusName": "Needs Review",
963 979 "properties": [],
964 980 "branch": null,
965 981 "summary": "",
966 982 "testPlan": "",
967 983 "lineCount": "2",
968 984 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
969 985 "diffs": [
970 986 "3",
971 987 "4",
972 988 ],
973 989 "commits": [],
974 990 "reviewers": [],
975 991 "ccs": [],
976 992 "hashes": [],
977 993 "auxiliary": {
978 994 "phabricator:projects": [],
979 995 "phabricator:depends-on": [
980 996 "PHID-DREV-gbapp366kutjebt7agcd"
981 997 ]
982 998 },
983 999 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
984 1000 "sourcePath": null
985 1001 }
986 1002 """
987 1003
988 1004 def fetch(params):
989 1005 """params -> single drev or None"""
990 1006 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
991 1007 if key in prefetched:
992 1008 return prefetched[key]
993 1009 drevs = callconduit(repo.ui, b'differential.query', params)
994 1010 # Fill prefetched with the result
995 1011 for drev in drevs:
996 1012 prefetched[drev[b'phid']] = drev
997 1013 prefetched[int(drev[b'id'])] = drev
998 1014 if key not in prefetched:
999 1015 raise error.Abort(
1000 1016 _(b'cannot get Differential Revision %r') % params
1001 1017 )
1002 1018 return prefetched[key]
1003 1019
1004 1020 def getstack(topdrevids):
1005 1021 """given a top, get a stack from the bottom, [id] -> [id]"""
1006 1022 visited = set()
1007 1023 result = []
1008 1024 queue = [{b'ids': [i]} for i in topdrevids]
1009 1025 while queue:
1010 1026 params = queue.pop()
1011 1027 drev = fetch(params)
1012 1028 if drev[b'id'] in visited:
1013 1029 continue
1014 1030 visited.add(drev[b'id'])
1015 1031 result.append(int(drev[b'id']))
1016 1032 auxiliary = drev.get(b'auxiliary', {})
1017 1033 depends = auxiliary.get(b'phabricator:depends-on', [])
1018 1034 for phid in depends:
1019 1035 queue.append({b'phids': [phid]})
1020 1036 result.reverse()
1021 1037 return smartset.baseset(result)
1022 1038
1023 1039 # Initialize prefetch cache
1024 1040 prefetched = {} # {id or phid: drev}
1025 1041
1026 1042 tree = _parse(spec)
1027 1043 drevs, ancestordrevs = _prefetchdrevs(tree)
1028 1044
1029 1045 # developer config: phabricator.batchsize
1030 1046 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1031 1047
1032 1048 # Prefetch Differential Revisions in batch
1033 1049 tofetch = set(drevs)
1034 1050 for r in ancestordrevs:
1035 1051 tofetch.update(range(max(1, r - batchsize), r + 1))
1036 1052 if drevs:
1037 1053 fetch({b'ids': list(tofetch)})
1038 1054 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1039 1055
1040 1056 # Walk through the tree, return smartsets
1041 1057 def walk(tree):
1042 1058 op = tree[0]
1043 1059 if op == b'symbol':
1044 1060 drev = _parsedrev(tree[1])
1045 1061 if drev:
1046 1062 return smartset.baseset([drev])
1047 1063 elif tree[1] in _knownstatusnames:
1048 1064 drevs = [
1049 1065 r
1050 1066 for r in validids
1051 1067 if _getstatusname(prefetched[r]) == tree[1]
1052 1068 ]
1053 1069 return smartset.baseset(drevs)
1054 1070 else:
1055 1071 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1056 1072 elif op in {b'and_', b'add', b'sub'}:
1057 1073 assert len(tree) == 3
1058 1074 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1059 1075 elif op == b'group':
1060 1076 return walk(tree[1])
1061 1077 elif op == b'ancestors':
1062 1078 return getstack(walk(tree[1]))
1063 1079 else:
1064 1080 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1065 1081
1066 1082 return [prefetched[r] for r in walk(tree)]
1067 1083
1068 1084
1069 1085 def getdescfromdrev(drev):
1070 1086 """get description (commit message) from "Differential Revision"
1071 1087
1072 1088 This is similar to the differential.getcommitmessage API, but we only care
1073 1089 about a limited set of fields: title, summary, test plan, and URL.
1074 1090 """
1075 1091 title = drev[b'title']
1076 1092 summary = drev[b'summary'].rstrip()
1077 1093 testplan = drev[b'testPlan'].rstrip()
1078 1094 if testplan:
1079 1095 testplan = b'Test Plan:\n%s' % testplan
1080 1096 uri = b'Differential Revision: %s' % drev[b'uri']
1081 1097 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1082 1098
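As a small illustration (with a made-up drev dict), getdescfromdrev stitches the surviving fields back into an import-able message:

    drev = {
        b'title': b'example',
        b'summary': b'Fix the frobnicator.',
        b'testPlan': b'ran the test suite',
        b'uri': b'https://phab.example.com/D2',
    }
    # Yields, separated by blank lines:
    #   example
    #
    #   Fix the frobnicator.
    #
    #   Test Plan:
    #   ran the test suite
    #
    #   Differential Revision: https://phab.example.com/D2
    desc = getdescfromdrev(drev)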
1083 1099
1084 1100 def getdiffmeta(diff):
1085 1101 """get commit metadata (date, node, user, p1) from a diff object
1086 1102
1087 1103 The metadata could be "hg:meta", sent by phabsend, like:
1088 1104
1089 1105 "properties": {
1090 1106 "hg:meta": {
1091 1107 "date": "1499571514 25200",
1092 1108 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1093 1109 "user": "Foo Bar <foo@example.com>",
1094 1110 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1095 1111 }
1096 1112 }
1097 1113
1098 1114 Or converted from "local:commits", sent by "arc", like:
1099 1115
1100 1116 "properties": {
1101 1117 "local:commits": {
1102 1118 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1103 1119 "author": "Foo Bar",
1104 1120 "time": 1499546314,
1105 1121 "branch": "default",
1106 1122 "tag": "",
1107 1123 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1108 1124 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1109 1125 "local": "1000",
1110 1126 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1111 1127 "summary": "...",
1112 1128 "message": "...",
1113 1129 "authorEmail": "foo@example.com"
1114 1130 }
1115 1131 }
1116 1132 }
1117 1133
1118 1134 Note: metadata extracted from "local:commits" will lose time zone
1119 1135 information.
1120 1136 """
1121 1137 props = diff.get(b'properties') or {}
1122 1138 meta = props.get(b'hg:meta')
1123 1139 if not meta:
1124 1140 if props.get(b'local:commits'):
1125 1141 commit = sorted(props[b'local:commits'].values())[0]
1126 1142 meta = {}
1127 1143 if b'author' in commit and b'authorEmail' in commit:
1128 1144 meta[b'user'] = b'%s <%s>' % (
1129 1145 commit[b'author'],
1130 1146 commit[b'authorEmail'],
1131 1147 )
1132 1148 if b'time' in commit:
1133 1149 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1134 1150 if b'branch' in commit:
1135 1151 meta[b'branch'] = commit[b'branch']
1136 1152 node = commit.get(b'commit', commit.get(b'rev'))
1137 1153 if node:
1138 1154 meta[b'node'] = node
1139 1155 if len(commit.get(b'parents', ())) >= 1:
1140 1156 meta[b'parent'] = commit[b'parents'][0]
1141 1157 else:
1142 1158 meta = {}
1143 1159 if b'date' not in meta and b'dateCreated' in diff:
1144 1160 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1145 1161 if b'branch' not in meta and diff.get(b'branch'):
1146 1162 meta[b'branch'] = diff[b'branch']
1147 1163 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1148 1164 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1149 1165 return meta
1150 1166
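A quick illustration of the "local:commits" fallback described above, reusing the sample values from the docstring (the input dict is made up from that example):

    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    meta = getdiffmeta(diff)
    # meta[b'user']   == b'Foo Bar <foo@example.com>'
    # meta[b'date']   == b'1499546314 0'   (time zone information is lost)
    # meta[b'branch'] == b'default'
    # meta[b'node'] / meta[b'parent'] hold the commit hash and its first parent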
1151 1167
1152 1168 def readpatch(repo, drevs, write):
1153 1169 """generate plain-text patch readable by 'hg import'
1154 1170
1155 1171 write is usually ui.write. drevs is what "querydrev" returns, results of
1156 1172 "differential.query".
1157 1173 """
1158 1174 # Prefetch hg:meta property for all diffs
1159 1175 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1160 1176 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1161 1177
1162 1178 # Generate patch for each drev
1163 1179 for drev in drevs:
1164 1180 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1165 1181
1166 1182 diffid = max(int(v) for v in drev[b'diffs'])
1167 1183 body = callconduit(
1168 1184 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1169 1185 )
1170 1186 desc = getdescfromdrev(drev)
1171 1187 header = b'# HG changeset patch\n'
1172 1188
1173 1189 # Try to preserve metadata from hg:meta property. Write hg patch
1174 1190 # headers that can be read by the "import" command. See patchheadermap
1175 1191 # and extract in mercurial/patch.py for supported headers.
1176 1192 meta = getdiffmeta(diffs[b'%d' % diffid])
1177 1193 for k in _metanamemap.keys():
1178 1194 if k in meta:
1179 1195 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1180 1196
1181 1197 content = b'%s%s\n%s' % (header, desc, body)
1182 1198 write(content)
1183 1199
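For reference, with the hg:meta example from the getdiffmeta docstring, the patch emitted above starts with headers that :hg:`import` understands (the message from getdescfromdrev and the raw diff body follow):

    # HG changeset patch
    # User Foo Bar <foo@example.com>
    # Date 1499571514 25200
    # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # Parent  6d0abad76b30e4724a37ab8721d630394070fe16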
1184 1200
1185 1201 @vcrcommand(
1186 1202 b'phabread',
1187 1203 [(b'', b'stack', False, _(b'read dependencies'))],
1188 1204 _(b'DREVSPEC [OPTIONS]'),
1189 1205 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1190 1206 )
1191 1207 def phabread(ui, repo, spec, **opts):
1192 1208 """print patches from Phabricator suitable for importing
1193 1209
1194 1210 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1195 1211 the number ``123``. It could also have common operators like ``+``, ``-``,
1196 1212 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1197 1213 select a stack.
1198 1214
1199 1215 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1200 1216 could be used to filter patches by status. For performance reasons, they
1201 1217 only represent a subset of non-status selections and cannot be used alone.
1202 1218
1203 1219 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1204 1220 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1205 1221 stack up to D9.
1206 1222
1207 1223 If --stack is given, follow dependency information and read all patches.
1208 1224 It is equivalent to the ``:`` operator.
1209 1225 """
1210 1226 opts = pycompat.byteskwargs(opts)
1211 1227 if opts.get(b'stack'):
1212 1228 spec = b':(%s)' % spec
1213 1229 drevs = querydrev(repo, spec)
1214 1230 readpatch(repo, drevs, ui.write)
1215 1231
1216 1232
1217 1233 @vcrcommand(
1218 1234 b'phabupdate',
1219 1235 [
1220 1236 (b'', b'accept', False, _(b'accept revisions')),
1221 1237 (b'', b'reject', False, _(b'reject revisions')),
1222 1238 (b'', b'abandon', False, _(b'abandon revisions')),
1223 1239 (b'', b'reclaim', False, _(b'reclaim revisions')),
1224 1240 (b'm', b'comment', b'', _(b'comment on the last revision')),
1225 1241 ],
1226 1242 _(b'DREVSPEC [OPTIONS]'),
1227 1243 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1228 1244 )
1229 1245 def phabupdate(ui, repo, spec, **opts):
1230 1246 """update Differential Revision in batch
1231 1247
1232 1248 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1233 1249 """
1234 1250 opts = pycompat.byteskwargs(opts)
1235 1251 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1236 1252 if len(flags) > 1:
1237 1253 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1238 1254
1239 1255 actions = []
1240 1256 for f in flags:
1241 1257 actions.append({b'type': f, b'value': b'true'})
1242 1258
1243 1259 drevs = querydrev(repo, spec)
1244 1260 for i, drev in enumerate(drevs):
1245 1261 if i + 1 == len(drevs) and opts.get(b'comment'):
1246 1262 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1247 1263 if actions:
1248 1264 params = {
1249 1265 b'objectIdentifier': drev[b'phid'],
1250 1266 b'transactions': actions,
1251 1267 }
1252 1268 callconduit(ui, b'differential.revision.edit', params)
1253 1269
1254 1270
1255 1271 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1256 1272 def template_review(context, mapping):
1257 1273 """:phabreview: Object describing the review for this changeset.
1258 1274 Has attributes `url` and `id`.
1259 1275 """
1260 1276 ctx = context.resource(mapping, b'ctx')
1261 1277 m = _differentialrevisiondescre.search(ctx.description())
1262 1278 if m:
1263 1279 return templateutil.hybriddict(
1264 1280 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1265 1281 )
1266 1282 else:
1267 1283 tags = ctx.repo().nodetags(ctx.node())
1268 1284 for t in tags:
1269 1285 if _differentialrevisiontagre.match(t):
1270 1286 url = ctx.repo().ui.config(b'phabricator', b'url')
1271 1287 if not url.endswith(b'/'):
1272 1288 url += b'/'
1273 1289 url += t
1274 1290
1275 1291 return templateutil.hybriddict({b'url': url, b'id': t,})
1276 1292 return None