##// END OF EJS Templates
phabricator: add a config knob to create obsolete markers when importing...
Matt Harbison -
r45041:f10055b0 default
parent child Browse files
Show More
@@ -1,1895 +1,1900 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points under the names Mercurial's
# extension loader looks for.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
# When True, phabimport creates obsolete markers for the local predecessors
# of imported revisions instead of leaving both versions visible.
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels applied to phabread/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered through vcrcommand(); it
# enables record/replay of HTTP traffic for testing.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
158 161
159 162
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads the JSON ``.arcconfig`` file from the working directory (if any)
    and maps its ``repository.callsign`` and ``phabricator.uri`` entries
    onto the ``phabricator.callsign`` / ``phabricator.url`` config items.
    Returns True (combined with the wrapped loadhgrc's result) when
    anything was loaded into the ui.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # a missing .arcconfig is the normal case; ignore silently
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
196 199
197 200
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command that also accepts the ``--test-vcr`` flag.

    With ``--test-vcr PATH``, HTTP traffic is recorded into (or, if PATH
    already exists, replayed from) a vcr cassette, so Conduit-using
    commands can be exercised in tests without a live Phabricator server.
    Without the flag the command runs normally.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match requests on URI, method and form parameters; JSON-valued
        # parameters are compared structurally so key ordering is irrelevant.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # never record a real API token into a cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # drop session cookies from recorded responses
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's import-time magic misbehaves under demandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped function's identity for help/dispatch
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
276 279
277 280
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def walk(prefix, value):
        # Python booleans become PHP-style 'true'/'false' strings
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # exact-type checks: dict/list subclasses are treated as scalars,
        # matching the original dispatch-by-type behavior
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = list(value.items())
        else:
            flat[prefix] = value
            return
        for subkey, subvalue in pairs:
            walk(b'%s[%s]' % (prefix, subkey) if prefix else subkey, subvalue)

    walk(b'', params)
    return util.urlreq.urlencode(flat)
303 306
304 307
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        groupname, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
333 336
334 337
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method (e.g. ``differential.creatediff``).  The
    request is sent either through the builtin urllib-based opener or, when
    ``phabricator.curlcmd`` is configured, by piping form data to curl.
    Raises error.Abort when the server reports an ``error_code``.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # the API token travels inside the JSON-encoded parameter blob
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # user-configured curl: feed the urlencoded form on stdin (-d @-)
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # builtin HTTP client honoring [auth] configuration
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # decode the response back to local (bytes) strings recursively
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
378 381
379 382
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(value):
        # json.loads only returns unicode strings
        if isinstance(value, pycompat.unicode):
            return encoding.unitolocal(value)
        return value

    def tounicode(value):
        # json.dumps only accepts unicode strings
        if isinstance(value, bytes):
            return encoding.unifromlocal(value)
        return value

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    result = pycompat.rapply(tounicode, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
403 406
404 407
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # cache the answer so later calls skip the server round trip
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
424 427
425 428
# Matches a local tag like "D123" that marks a previously-submitted revision.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url ending in D123>" commit-message line.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
430 433
431 434
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag matches still need server confirmation
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: an explicit commit-message association is trusted
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # a diff's hg:meta property records the node it was created from
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # overwrite the stale local tag with nullid to drop it
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
525 528
526 529
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def drevof(ctx):
        # A "Differential Revision:" line in the description wins over tags.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            return int(descmatch.group('id'))
        # Otherwise fall back to a local "D123" tag, if any.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                return int(tagmatch.group(1))
        return None

    return {rev: drevof(repo[rev]) for rev in revs}
548 551
549 552
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    repo = ctx.repo()
    chunks = patch.diffui(
        repo, ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
558 561
559 562
class DiffChangeType(object):
    """Per-file change kinds used as ``phabchange.type``.

    These integer values are serialized into the ``differential.creatediff``
    payload (via attr.asdict in creatediff).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
569 572
570 573
class DiffFileType(object):
    """File content kinds used as ``phabchange.fileType`` in the
    ``differential.creatediff`` payload.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
575 578
576 579
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Offsets/lengths describe the hunk's position in the old and new file;
    ``corpus`` is the hunk body text.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
590 593
591 594
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:*`` metadata entry under an ``old:*`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous git-style unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new git-style unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk (as a plain dict) and fold in its line counts."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
631 634
632 635
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Add a phabchange (as a plain dict), keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
659 662
660 663
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``ctx.p1()`` and ``ctx`` and appends one
    phabhunk per diff hunk to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # very large context so hunks carry as much surrounding text as possible
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # drop the "@@ ... @@" line; the offsets carry that information
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # count added/deleted lines for the hunk via diffstat
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
689 692
690 693
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by ``file.allocate``; chunks the
    server already marked complete are skipped.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
716 719
717 720
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID.  ``file.allocate`` is consulted first so that
    content the server already has (matched by sha256 hash) is not
    re-uploaded, and so the server can request chunked upload for large
    files.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID yet: small file, single-shot upload
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated but incomplete: server wants chunked upload
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
753 756
754 757
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # content is unchanged (e.g. mode-only change)
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
778 781
779 782
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # the web UI can render images inline
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
792 795
793 796
# Copied from mercurial/patch.py
# Maps a Mercurial file flag (l=symlink, x=executable, ''=regular) to the
# git-style mode string used in unix:filemode properties.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
796 799
797 800
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
811 814
812 815
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each removed file becomes a DELETE phabchange; its text is attached
    unless the old version is binary or non-UTF-8.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # look up the parent file context once instead of twice
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
825 828
826 829
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    Binary (or non-UTF-8) content on either side forces the whole change
    to be handled as binary, with both versions uploaded.
    """
    for fname in modified:
        fctx = ctx[fname]
        # NOTE(review): fctx.p1() (filelog parent) and ctx.p1()[fname] are
        # both used for the "old" side below; confirm they are always the
        # same file context before unifying them.
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
851 854
852 855
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed``: a rename source found there is dropped so that
    addremoved() does not emit a second DELETE change for it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source vanished: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a move source copied to yet another destination
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # the *_AWAY changes are emitted after all destinations are known
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
922 925
923 926
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff from the changes between ``ctx`` and its first
    parent and submits it via the ``differential.creatediff`` API.
    Returns the server's diff object; raises error.Abort on failure.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
949 952
950 953
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches two properties via "differential.setdiffproperty": "hg:meta"
    (commit identity consumed by phabread/phabimport) and "local:commits"
    (the arcanist-compatible equivalent).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
987 990
988 991
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair: the "differential.revision.edit"
    response and the diff attached to the revision (new or ``olddiff``).
    """
    repo = ctx.repo()
    if oldnode:
        # Diff with maximal context so a pure commit-message change does not
        # trigger a fresh diff upload.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1058 1061
1059 1062
def userphids(ui, names):
    """convert user names to PHIDs"""
    wanted = [n.lower() for n in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # The API silently drops unknown usernames rather than erroring, so
    # compare what we asked for against what actually came back.
    found = {e[b'fields'][b'username'].lower() for e in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [e[b'phid'] for e in entries]
1075 1078
1076 1079
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Either the config option or the CLI flag enables the confirm prompt.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers are expressed as "blocking(PHID)" markers.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Map parents through already-rewritten ancestors so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1288 1291
1289 1292
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output. Note the trailing space in "Parent ":
# it presumably mirrors the double-spaced "# Parent  <node>" header that
# "hg export" emits — confirm against mercurial/patch.py before changing.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1301 1304
1302 1305
def _confirmbeforesend(repo, revs, oldmap):
    """list each changeset with its Differential status, then prompt

    Returns True when the user confirms sending, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        _oldnode, _olddiff, drevid = oldmap.get(
            ctx.node(), (None, None, None)
        )
        if drevid:
            drevlabel = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevlabel = ui.label(_(b'NEW'), b'phabricator.drev')
        nodelabel = ui.label(bytes(ctx), b'phabricator.node')
        desclabel = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevlabel, nodelabel, desclabel))

    # promptchoice returns the index of the selected answer; 0 is "&Yes".
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1330 1333
1331 1334
# Status names (as produced by _getstatusname: statusName lowercased with
# spaces removed) that the DREVSPEC query language accepts as filters.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1340 1343
1341 1344
1342 1345 def _getstatusname(drev):
1343 1346 """get normalized status name from a Differential Revision"""
1344 1347 return drev[b'statusName'].replace(b' ', b'').lower()
1345 1348
1346 1349
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# Grammar table consumed by mercurial's generic Pratt parser
# (mercurial.parser.parser); see _parse/_tokenize below.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1361 1364
1362 1365
def _tokenize(text):
    """yield (token-type, value, position) triples for a DREVSPEC string

    Token types are keys of ``_elements``; a final ``end`` token is always
    emitted after the input is exhausted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume a run of non-special bytes as one symbol token.
        # pycompat.iterbytestr yields length-1 bytes on both py2 and py3.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1382 1385
1383 1386
def _parse(text):
    """parse a DREVSPEC string into a prefix tree

    Raises ParseError if the grammar could not consume the whole input.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1389 1392
1390 1393
1391 1394 def _parsedrev(symbol):
1392 1395 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1393 1396 if symbol.startswith(b'D') and symbol[1:].isdigit():
1394 1397 return int(symbol[1:])
1395 1398 if symbol.isdigit():
1396 1399 return int(symbol)
1397 1400
1398 1401
1399 1402 def _prefetchdrevs(tree):
1400 1403 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1401 1404 drevs = set()
1402 1405 ancestordrevs = set()
1403 1406 op = tree[0]
1404 1407 if op == b'symbol':
1405 1408 r = _parsedrev(tree[1])
1406 1409 if r:
1407 1410 drevs.add(r)
1408 1411 elif op == b'ancestors':
1409 1412 r, a = _prefetchdrevs(tree[1])
1410 1413 drevs.update(r)
1411 1414 ancestordrevs.update(r)
1412 1415 ancestordrevs.update(a)
1413 1416 else:
1414 1417 for t in tree[1:]:
1415 1418 r, a = _prefetchdrevs(t)
1416 1419 drevs.update(r)
1417 1420 ancestordrevs.update(a)
1418 1421 return drevs, ancestordrevs
1419 1422
1420 1423
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Key on the first requested id/phid; a hit in the prefetch cache
        # skips the conduit round-trip entirely.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges down towards the stack bottom.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Ancestors of r likely have nearby smaller ids; fetch a window of
        # them in one call instead of walking depends-on one by one.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the prefetched candidate set.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1546 1549
1547 1550
def getdescfromdrev(drev):
    """build a commit message from a "Differential Revision" dict

    Comparable to the differential.getcommitmessage API, but only the
    title, summary, test plan and URL fields are used.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Drop empty sections so no doubled blank lines appear in the message.
    return b'\n\n'.join(p for p in parts if p)
1561 1564
1562 1565
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    Preferred source is the "hg:meta" property written by phabsend:

    "properties": {
        "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
        }
    }

    Otherwise the "local:commits" property written by "arc" is converted:

    "properties": {
        "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
                "author": "Foo Bar",
                "authorEmail": "foo@example.com"
                "branch": "default",
                "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "local": "1000",
                "message": "...",
                "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "summary": "...",
                "tag": "",
                "time": 1499546314,
            }
        }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is not recorded, so pretend UTC ("0" offset).
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # Backfill anything still missing from the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    for metakey, diffkey in [
        (b'branch', b'branch'),
        (b'parent', b'sourceControlBaseRevision'),
    ]:
        if metakey not in meta and diff.get(diffkey):
            meta[metakey] = diff[diffkey]
    return meta
1630 1633
1631 1634
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # Only the most recent diff of each revision is used.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1668 1671
1669 1672
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ancestors operator.
        spec = b':(%s)' % spec
    drevs = querydrev(ui, spec)

    def _write(patches):
        # Stream each generated patch straight to stdout.
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1706 1709
1707 1710
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, spec, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # Chain: the next patch applies on top of the one just
                # committed.
                parents = [repo[node]]

    # NOTE: opts was already converted by byteskwargs() at the top of this
    # function. The previous code (copy-pasted from phabread) converted it a
    # second time here, which is redundant at best and raises AttributeError
    # on Python 3 (byteskwargs calls .encode() on the already-bytes keys).
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo.ui, spec)

    readpatch(repo.ui, drevs, _write)
1768 1773
1769 1774
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # At most one state-changing flag may be given.
    flags = []
    for name in b'accept reject abandon reclaim'.split():
        if opts.get(name):
            flags.append(name)
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = querydrev(ui, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # A comment, if given, is attached only to the last revision.
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1807 1812
1808 1813
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: a "Differential Revision: <url>" line embedded in
    # the commit description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # Fall back to a local "Dxxx" tag on this node, resolving the URL from
    # the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        url += tag
        return templateutil.hybriddict({b'url': url, b'id': tag})
    return None
1832 1837
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.

    Returns a hybrid dict with ``url`` and ``status`` attributes for the
    Differential associated with the changeset, or None if the changeset
    has no associated Differential Revision.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # getdrevmap() maps revisions that have no associated Differential to
    # None (see phabstatusshowview); don't send a bogus id to the server.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1854 1859
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition the revisions: those without an associated Differential go
    # to unknownrevs; the rest are grouped by Differential id.
    unknownrevs = []
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # Fetch all referenced Differentials in a single conduit round trip,
    # then index the results by local revision number.
    drevs = callconduit(
        ui, b'differential.query', {b'ids': list(revsbydrevid)}
    )
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Displayer hook: print the Differential URI and its status name,
        # colorized according to the phabricator.status.* labels.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revisions without a Differential are excluded from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now