##// END OF EJS Templates
phabricator: combine commit messages into the review when folding commits...
Matt Harbison -
r45134:dbe9182c default
parent child Browse files
Show More
@@ -1,1948 +1,2003 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changesets from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration tables/hooks so Mercurial's extension
# loader picks them up from this module.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# Repository callsign on the Phabricator server (see module docstring).
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# Optional external curl command used instead of the builtin HTTP library
# (see module docstring).
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# Base URL of the Phabricator instance.
eh.configitem(
    b'phabricator', b'url', default=None,
)
# Whether `hg phabsend` asks for confirmation before sending.
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
# NOTE(review): presumably controls the phase/obsolescence handling of
# imported changesets — confirm against the phabimport implementation.
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)
134 134
# Color/effect defaults for the labels this extension attaches to its
# output (actions, statuses, descriptions, nodes and drev names).
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}
149 149
# Extra command-line flag appended to every command registered through
# vcrcommand(), so tests can record/replay HTTP conversations.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162 162
163 163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``repository.callsign`` and ``phabricator.uri`` from a JSON
    ``.arcconfig`` file in the working directory, if present, and applies
    them as config before chaining to the wrapped loadhgrc (so ``.hg/hgrc``
    can still override them).  Returns True if either source contributed
    configuration.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # A missing .arcconfig is not an error.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
200 200
201 201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded or replayed.

    Works like ``@command`` but appends the hidden ``--test-vcr`` flag (see
    ``_VCR_FLAGS``).  When ``--test-vcr PATH`` is given, the ``vcr`` package
    records a new transcript to PATH, or mocks all HTTP requests from an
    existing one, so tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: URI, method and parsed body params
        # must agree; JSON payload values are compared structurally so key
        # ordering does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact conduit API tokens before they land in the transcript.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are environment-specific; drop them from the
        # transcript.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # NOTE(review): the flag defaults to b'' (see _VCR_FLAGS), so the
            # None fallback should never be hit — fsdecode(None) would raise.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr must be imported with the demand importer disabled.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/registration.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280 280
281 281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # Booleans are transmitted in PHP form.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Dispatch on the exact type (not isinstance), matching the original
        # type()-keyed table: only plain lists/dicts are expanded.
        if type(value) is list:
            children = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in children:
            if prefix:
                _flatten(b'%s[%s]' % (prefix, key), item)
            else:
                _flatten(key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
307 307
308 308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Look up an [auth] group matching the Phabricator URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337 337
338 338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.creatediff``).
    Raises ``error.Abort`` if the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Authentication travels inline with the request payload; copy so the
    # caller's dict is not mutated.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Feed the request body to the configured external curl process on
        # its stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP path.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert the unicode strings json.loads returns back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382 382
383 383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable, human-readable output for tests: sorted keys, 2-space indent.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407 407
408 408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign to a repository PHID on the server.
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if len(data) == 0:
        return None
    repophid = data[0][b'phid']
    # Cache the answer in config for subsequent calls.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428 428
429 429
# Matches local tags of the form "D123" that associate a node with a
# Differential Revision number.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>" line in a commit message,
# capturing the full URL and the trailing numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434 434
435 435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    # No D* tag on this predecessor: try the next one.
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a D* tag (force=1 below means the
            # commit message itself claims the association).
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the node a diff was made from; None when unrecorded.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid removes the stale local tag.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529 529
530 530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    drevs = {}
    for rev in revs:
        drevs[rev] = None
        ctx = repo[rev]
        # A "Differential Revision:" line in the commit message wins.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch is not None:
            drevs[rev] = int(descmatch.group('id'))
            continue
        # Otherwise fall back to a local "D123" tag, if any.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch is not None:
                drevs[rev] = int(tagmatch.group(1))
                break

    return drevs
552 552
553 553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    diffchunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; the labels are irrelevant here.
    for chunk, _unusedlabel in diffchunks:
        buf.write(chunk)
    return buf.getvalue()
562 562
563 563
class DiffChangeType(object):
    """Constants describing how a file changed in a Differential diff;
    sent to Phabricator as ``phabchange.type``.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573 573
574 574
class DiffFileType(object):
    """Constants describing a file's content kind in a Differential diff;
    sent to Phabricator as ``phabchange.fileType``.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579 579
580 580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names are camelCase because they are transmitted verbatim to
    the Conduit API (see the ``camelcase-required`` markers).
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594 594
595 595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names are camelCase because they are transmitted verbatim to
    the Conduit API (see the ``camelcase-required`` markers).
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Duplicate every "new:*" metadata entry under its "old:*" key.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the pre-change unix file mode.
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record the post-change unix file mode.
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        # Hunks are stored as plain dicts (byteskwargs of the attrs fields).
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635 635
636 636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Attribute names are camelCase because they are transmitted verbatim to
    the Conduit API (see the ``camelcase-required`` markers).
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        # Changes are keyed by their post-change path.
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663 663
664 664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # Very large context, so each hunk carries plenty of surrounding lines.
    opts = mdiff.diffopts(git=True, context=32767)
    _oldfctx, _newfctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in fhunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # Drop the "@@ ... @@" line; the offsets/lengths carry that info.
        corpus = b''.join(lines[1:])
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693 693
694 694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by ``file.allocate``.
    """
    ui = fctx.repo().ui
    # The server tells us which byte ranges it still needs.
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # Resumed upload: skip chunks the server already has.
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720 720
721 721
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID on success; raises ``error.Abort`` when no PHID
    could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID was allocated: fall back to a single file.upload call.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # A PHID was allocated: upload in chunks against it.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757 757
758 758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged: nothing to upload.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
782 782
783 783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Images get their own file type so the web UI can render them.
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
796 796
797 797
# Copied from mercurial/patch.py: maps a Mercurial flag character
# ('l' = symlink, 'x' = executable, '' = regular) to the git-style
# octal mode string used in diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800 800
801 801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user the file will be treated as binary.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815 815
816 816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or undecodable) files carry no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829 829
830 830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Keep this short-circuit order: notutf8() prints a message as a
        # side effect when it detects an undecodable file.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855 855
856 856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — the source of a detected move is
    deleted from it, so a later addremoved() call won't emit it again.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where the file came from, if anywhere.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Already moved elsewhere too: mark the source MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Short-circuit order matters: notutf8() prints a message.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the source-side records after all destinations are known.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938 938
939 939
def creatediff(basectx, ctx):
    """Build and upload a "Differential Diff" for the range basectx::ctx.

    Returns the dict from the "differential.creatediff" conduit call, or
    aborts if the server returned nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    parent = basectx.p1()
    # Assemble the payload for the "differential.creatediff" API call.
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % parent.hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = parent.status(ctx)
    # addadded strips moved files out of ``removed`` so that addremoved
    # doesn't process them a second time.
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx == ctx:
            msg = _(b'cannot create diff for %s') % ctx
        else:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)
    return diff
969 969
970 970
def writediffproperties(ctxs, diff):
    """attach lossless patch metadata to a diff so it can be re-applied

    ``ctxs`` is the list of commits that created the diff, in ascending
    order.  Usually a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff responses carry b'diffid'; differential.querydiffs responses
    # carry b'id' instead, so accept either.
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx, tipctx = ctxs[0], ctxs[-1]
    ui = basectx.repo().ui

    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # One entry per folded commit, keyed by hex node.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1014 1014
1015 1015
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    basectx = ctx
    repo = ctx.repo()
    if oldnode:
        # Huge context so the full file content is diffed, making the
        # comparison insensitive to context-line differences.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        oldbasectx = oldctx
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties([ctx], diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate([ctx]):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1089 1117
1090 1118
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    data = result[b'data']
    # The API does not consider an unknown username an error, so detect any
    # names that failed to resolve ourselves and abort on them.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1106 1134
1107 1135
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                # Commits are not folded here, so each one amends against
                # its own review.
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1321 1349
1322 1350
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1334 1362
1335 1363
def _confirmbeforesend(repo, revs, oldmap):
    """interactively confirm that ``revs`` should be sent to Phabricator

    Prints one summary line per changeset (existing DREV number or "NEW"),
    then prompts.  Returns True if the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        firstline = ctx.description().splitlines()[0]

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns 0 for "Yes", so a zero result means confirmed.
    return not ui.promptchoice(prompt)
1363 1391
1364 1392
1365 1393 _knownstatusnames = {
1366 1394 b'accepted',
1367 1395 b'needsreview',
1368 1396 b'needsrevision',
1369 1397 b'closed',
1370 1398 b'abandoned',
1371 1399 b'changesplanned',
1372 1400 }
1373 1401
1374 1402
1375 1403 def _getstatusname(drev):
1376 1404 """get normalized status name from a Differential Revision"""
1377 1405 return drev[b'statusName'].replace(b' ', b'').lower()
1378 1406
1379 1407
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1394 1422
1395 1423
def _tokenize(text):
    """yield (token-type, token-value, position) triples for a DREVSPEC"""
    special = b'():+-& '
    view = memoryview(text)  # zero-copy slice
    pos, end = 0, len(text)
    while pos < end:
        # Greedily consume a run of non-special bytes as a single symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if not symbol:
            # Single special character; spaces are skipped silently.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
        else:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
    yield (b'end', None, pos)
1415 1443
1416 1444
def _parse(text):
    """parse a DREVSPEC string into a query tree, or raise ParseError"""
    tokens = _tokenize(text)
    tree, pos = parser.parser(_elements).parse(tokens)
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1422 1450
1423 1451
1424 1452 def _parsedrev(symbol):
1425 1453 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1426 1454 if symbol.startswith(b'D') and symbol[1:].isdigit():
1427 1455 return int(symbol[1:])
1428 1456 if symbol.isdigit():
1429 1457 return int(symbol)
1430 1458
1431 1459
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # An ancestors query needs the operand itself plus everything it
        # transitively depends on.
        r, a = _prefetchdrevs(tree[1])
        drevs |= r
        ancestordrevs |= r
        ancestordrevs |= a
    else:
        # Binary operators / groups: recurse into every operand.
        for subtree in tree[1:]:
            r, a = _prefetchdrevs(subtree)
            drevs |= r
            ancestordrevs |= a
    return drevs, ancestordrevs
1452 1480
1453 1481
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None

        Consults ``prefetched`` first; on a miss, queries the server and
        caches every revision that comes back (keyed by both PHID and
        integer id).
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Walks "phabricator:depends-on" links depth-first, then reverses so
        the result runs bottom (oldest dependency) to top.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestors query,
    # speculatively pull in the ``batchsize`` ids preceding it, since stacks
    # tend to be numbered consecutively.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter within the prefetched id universe.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # operator.and_/add/sub map directly onto smartset set algebra.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1579 1607
1580 1608
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. no summary) are dropped entirely.
    return b'\n\n'.join(p for p in parts if p)
1594 1622
1595 1623
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review.  The determination is made
    for each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in.  Append in the DREV line, or update it if it
    # exists.  At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    description = ctx.description()
    if _differentialrevisiondescre.search(description) is None:
        return b'\n\n'.join([description, uri])

    return _differentialrevisiondescre.sub(uri, description)
1649
1650
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
            "hg:meta": {
                "branch": "default",
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "authorEmail": "foo@example.com"
                    "branch": "default",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "local": "1000",
                    "message": "...",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "summary": "...",
                    "tag": "",
                    "time": 1499546314,
                }
            }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting dicts only works for a single entry on
            # py3; presumably arc sends one commit per diff here — confirm.
            commit = sorted(localcommits.values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Backfill anything still missing from diff-level fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1663 1718
1664 1719
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _formatspec(s):
            # Wrap each spec in parens; ``:`` expands it to its stack.
            if stack:
                return b'(:(%s))' % s
            return b'(%s)' % s

        spec = b'+'.join(pycompat.maplist(_formatspec, specs))
        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    # No specs given, or the query matched nothing.
    raise error.Abort(_(b"empty DREVSPEC set"))
1684 1739
1685 1740
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential
    number (as bytes, without the "D" prefix) and the bytes are the text
    of a patch to be imported.  drevs is what "querydrev" returns, results
    of "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for key, label in _metanamemap.items():
            if key in meta:
                headerlines.append(b'# %s %s\n' % (label, meta[key]))

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1722 1777
1723 1778
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Dump each generated patch straight to the ui.
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
1760 1815
1761 1816
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone().
    # True means obsmarker handling is delegated to evolve's wrapper.
    opts[b'obsolete'] = ui.configbool(b'phabimport', b'obsolete')

    # Only set the key when enabled, so the key's absence keeps the
    # default (non-secret) phase behavior.
    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True

    # Sink for readpatch(): apply each patch on top of the previous one,
    # starting at the working directory parent, inside a single transaction.
    def _apply(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain the next patch onto the commit just created.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
1821 1876
1822 1877
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    # The status flags are mutually exclusive.
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # One status-change transaction per selected flag, applied to every drev.
    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The comment (if any) is attached only to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1860 1915
1861 1916
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: the "Differential Revision: <url>" line that phabsend
    # amends into the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group('url'), b'id': b"D%s" % match.group('id'),}
        )

    # Fallback: a local D<number> tag; build the URL from the configured
    # Phabricator base URL.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        base = repo.ui.config(b'phabricator', b'url')
        if not base.endswith(b'/'):
            base += b'/'
        return templateutil.hybriddict({b'url': base + tag, b'id': tag,})

    return None
1884 1939
1885 1940
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # getdrevmap() appears to map revisions without an associated Differential
    # Revision to None rather than omitting them (see the `drevid is not None`
    # check in phabstatusshowview), so the KeyError guard alone is not enough:
    # bail out instead of querying the server with a bogus id.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    # The query may return extra/stale entries; match on the exact id.
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1906 1961
1907 1962
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # Unfinished ("underway") changesets, in topological order for the graph.
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Partition: revs with no associated Differential Revision (drevid None)
    # are dropped from the graph; the rest are indexed both ways so one
    # conduit round-trip can serve every displayed rev.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # Several local revs may map to the same drev (e.g. amends).
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # Single batched query for all differentials, then invert to rev -> drev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Extra-display hook invoked by the displayer for each graph node: print
    # the differential URI and its color-labeled status under the changeset.
    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph revs that have status info to show.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now