##// END OF EJS Templates
phabupdate: allow revisions to be reopened...
Matt Harbison -
r45698:2010f314 default
parent child Browse files
Show More
@@ -1,2297 +1,2299 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
# if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid, short
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Helper that collects the command/config/templatekeyword registrations made
# below; the aliases that follow re-export them under the attribute names
# Mercurial's extension loader looks for.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# Repository callsign on the Phabricator server (e.g. "FOO")
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# Optional external curl command used instead of the builtin HTTP library
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator', b'debug', default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# Base URL of the Phabricator instance
eh.configitem(
    b'phabricator', b'url', default=None,
)
# When set, ask for confirmation before sending
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels applied to the phabricator.* labeled output below
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Flag appended to every command wrapped by vcrcommand(); used by the test
# suite to record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
166 166
167 167
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Reads ``.arcconfig`` (JSON) from the working directory, maps its
    ``repository.callsign`` and ``phabricator.uri`` keys onto the
    corresponding ``[phabricator]`` config items, and then chains to the
    wrapped loader for ``.hg/hgrc``.  Returns True if either source
    provided configuration.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig in this repository; nothing to apply
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
204 204
205 205
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command that also accepts a hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is passed, HTTP traffic is recorded to (or,
    if the cassette file exists, replayed from) PATH; otherwise the command
    runs unchanged.  This lets the test suite exercise Conduit-calling
    commands without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match a recorded request against a live one: URI and method must
        # be identical, and each parameter must be equivalent.  JSON payloads
        # are compared structurally so key ordering does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the real API token before it lands in a recorded cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session-specific; drop them from recordings
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            if kwargs.get('test_vcr'):
                cassette = pycompat.fsdecode(kwargs.pop('test_vcr'))
                import hgdemandimport

                # vcr's own imports do not cooperate with demandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's url module so its connections go
                        # through vcr's recording stubs
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/dispatch
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
284 284
285 285
286 286 def _debug(ui, *msg, **opts):
287 287 """write debug output for Phabricator if ``phabricator.debug`` is set
288 288
289 289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
290 290 printed with the --debug argument.
291 291 """
292 292 if ui.configbool(b"phabricator", b"debug"):
293 293 flag = ui.debugflag
294 294 try:
295 295 ui.debugflag = True
296 296 ui.write(*msg, **opts)
297 297 finally:
298 298 ui.debugflag = flag
299 299
300 300
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # Booleans become PHP-style string literals
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact-type dispatch (subclasses intentionally fall through to the
        # scalar case, matching the original behavior)
        if type(value) is list:
            children = [(b'%d' % i, v) for i, v in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, child in children:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, child)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
326 326
327 327
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Find the [auth] group matching this URL and pull its phabtoken
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
356 356
357 357
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    Raises ``error.Abort`` if the server response carries an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy so the caller's dict is not mutated when the token is injected
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the form data in on stdin (-d @-)
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library, honoring [auth] credentials
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert every unicode string in the decoded JSON to local bytes
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
401 401
402 402
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sort_keys gives deterministic output, which the tests rely on
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
426 426
427 427
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Look the repository up on the server by its callsign
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    phid = data[0][b'phid']
    # Cache the answer in the config for the rest of this process
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
447 447
448 448
# Matches a local tag name of the form "D123" (no leading zeros)
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>" line in a commit message,
# capturing the full URL and the trailing revision number
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
453 453
454 454
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag match still needs server confirmation
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: an explicit commit message association is trusted
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
580 580
581 581
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevfor(ctx):
        # Prefer an explicit "Differential Revision:" line in the message
        msgmatch = _differentialrevisiondescre.search(ctx.description())
        if msgmatch:
            return int(msgmatch.group('id'))
        # Otherwise fall back to local tags shaped like "D123"
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                return int(tagmatch.group(1))
        return None

    return {rev: _drevfor(repo[rev]) for rev in revs}
603 603
604 604
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the text matters here
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
613 613
614 614
class DiffChangeType(object):
    """Integer codes describing how a file changed in a Differential diff.

    These values are sent to Phabricator as part of each change (see
    ``phabchange.type``), so they must not be renumbered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
624 624
625 625
class DiffFileType(object):
    """Integer codes classifying a file's content in a Differential change.

    Sent to Phabricator via ``phabchange.fileType`` (see ``makebinary`` and
    ``addoldbinary``); must not be renumbered.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
630 630
631 631
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute declaration order matters: ``maketext`` constructs these
    positionally.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body: the diff lines themselves, without the "@@" header
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
645 645
646 646
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata entry under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the file's previous unix mode string (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the file's new unix mode string (e.g. b'100644')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
686 686
687 687
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: serialized phabchange}, filled by addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange (one file) under its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
714 714
715 715
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``basectx.p1()`` and ``ctx`` and appends one
    phabhunk per hunk to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Very large context so hunks effectively cover the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the "@@ ..." header line; Phabricator gets offsets separately
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Derive added/deleted line counts for this hunk via diffstat
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        # NOTE: positional construction; must match phabhunk's attribute order
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
744 744
745 745
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # The server reports every chunk with its byte range and whether it
    # already has that range (enables resuming partial uploads)
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # Server already has this byte range; skip it
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
771 771
772 772
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; aborts if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID allocated up front: single-request upload path
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # Server allocated a PHID: upload the content in chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
808 808
809 809
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # Content is unchanged (e.g. mode-only change): reuse the new
        # side's metadata for the old side instead of re-uploading.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
833 833
834 834
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    # Assume plain binary until the mime type proves it is an image
    pchange.fileType = DiffFileType.BINARY
    meta = pchange.metadata
    meta[b'new:binary-phid'] = uploadfile(fctx)
    meta[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        mime = pycompat.bytestr(guessed)
        meta[b'new:file:mime-type'] = mime
        if mime.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
847 847
848 848
# Copied from mercurial/patch.py
# Maps a filectx flag (b'l' = symlink, b'x' = executable, b'' = regular
# file) to the git-style mode string used in change properties.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
851 851
852 852
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user which file tripped the check before it is treated
        # as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
866 866
867 867
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary (or non-UTF-8, which must be treated as binary) files get
        # no text hunks; the DELETE change type alone conveys the removal
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
880 880
881 881
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # Only record modes when they changed
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # Either side being binary (or non-UTF-8, which must be treated as
        # binary) forces the binary path.  NOTE: notutf8() prints a warning
        # as a side effect, so the short-circuit order matters here.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
906 906
907 907
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: mutates ``removed`` in place — a rename's source is dropped from
    ``removed`` so the caller's subsequent addremoved() won't re-report it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    # {dst: src} copies between basectx.p1() and ctx; only computed for a
    # multi-commit range (basectx != ctx, i.e. --fold), otherwise the
    # per-file renamed() information below is used.
    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the pre-copy/rename name; equal to fname for plain adds
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappears: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-recorded move
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the AWAY-side records collected above after all HERE-side changes
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
989 989
990 990
def creatediff(basectx, ctx):
    """create a Differential Diff

    Posts the content diff of basectx.p1()::ctx to Phabricator via the
    "differential.creatediff" API and returns the resulting diff dict.
    Aborts if the server returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    parent = basectx.p1()

    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % parent.hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = parent.status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx == ctx:
            msg = _(b'cannot create diff for %s') % ctx
        else:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)
    return diff
1020 1020
1021 1021
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx, tipctx = ctxs[0], ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta": single-commit style metadata, describing the range tip
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits": one record per local commit covered by the diff
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1065 1065
1066 1066
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair of conduit result dicts; aborts if the
    "differential.revision.edit" call returns nothing.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # context=32767 is effectively whole-file context, so the comparison
        # below depends only on content, not on how hunks would be split
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    # Caller-supplied transactions (e.g. reviewers.add) go after ours
    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1185 1185
1186 1186
def userphids(ui, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    data = result[b'data']
    # An unknown username is not an API error, so verify every requested
    # name actually resolved.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1202 1202
1203 1203
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    summary = ctx.description().split(b'\n')[0]
    desc = ui.label(summary, b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1222 1222
1223 1223
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    pretty = stringutil.pprint([short(node) for node in newnodes])
    _debug(unfi.ui, b"new commits: %s\n" % pretty)

    try:
        writediffproperties([unfi[node] for node in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1241 1241
1242 1242
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
        (b'', b'fold', False, _(b'combine the revisions into one review')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range.  The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it.  Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, skiprev, revtype):
        # Helper: abort, summarizing the offending revisions (minus skiprev)
        badnodes = [repo[r].node() for r in revs if r != skiprev]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, heads.max(), b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, roots.min(), b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option.  (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.)  It
        # should lock the repo and ensure that the range is editable, but that
        # would make the code pretty convoluted.  The default behavior of
        # `arc` is to create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value,
        # and iterate over those groups when posting, and then again when
        # amending.  But for simplicity, require all selected revisions to be
        # for the same DREV (if present).  Adding local revisions to an
        # existing DREV is acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the shared "reviewers.add" transaction once; it is passed to every
    # createdifferentialrevision() call below.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            if fold:
                # NOTE(review): this inner loop rebinds ``ctx`` (it ends as
                # ctxs[-1]); harmless because the fold path breaks out of the
                # outer loop after one iteration (see below).
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # With --fold there is a single review; print one status line per
            # local commit, then stop sending.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits.  The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contains "Differential Revision"
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected.  No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs.  So use the queried diff (if
                # present), or force the amend (a new revision is being
                # posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process.  The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=old.description(),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1616 1616
1617 1617
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # trailing space is intentional: the header prefix includes it
        (b'parent', b'Parent '),
    ]
)
1629 1629
1630 1630
def _confirmbeforesend(repo, revs, oldmap):
    """list the selected changesets and prompt before sending them

    Returns True when the user confirms, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        summary = ctx.description().splitlines()[0]
        _oldnode, _olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(summary, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for the first choice (Yes)
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1658 1658
1659 1659
# Status names in normalized form, i.e. _getstatusname() output: lower-cased
# with spaces removed.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1668 1668
1669 1669
1670 1670 def _getstatusname(drev):
1671 1671 """get normalized status name from a Differential Revision"""
1672 1672 return drev[b'statusName'].replace(b' ', b'').lower()
1673 1673
1674 1674
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# This table drives parser.parser() in _parse() below.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1689 1689
1690 1690
def _tokenize(text):
    """yield (token-type, value, position) triples for a drev spec

    Symbols are maximal runs of non-special bytes; spaces are skipped; a
    final (b'end', None, pos) token is always emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        ordinary = itertools.takewhile(
            lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
        )
        symbol = b''.join(ordinary)
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
            continue
        # special char; spaces are dropped entirely
        ch = text[pos : pos + 1]
        if ch != b' ':
            yield (ch, None, pos)
        pos += 1
    yield (b'end', None, pos)
1710 1710
1711 1711
def _parse(text):
    """parse a drev spec into a tree, rejecting trailing garbage"""
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos == len(text):
        return tree
    raise error.ParseError(b'invalid token', pos)
1717 1717
1718 1718
1719 1719 def _parsedrev(symbol):
1720 1720 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1721 1721 if symbol.startswith(b'D') and symbol[1:].isdigit():
1722 1722 return int(symbol[1:])
1723 1723 if symbol.isdigit():
1724 1724 return int(symbol)
1725 1725
1726 1726
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # ":X" — X itself and everything it depends on
        r, a = _prefetchdrevs(tree[1])
        drevs |= r
        ancestordrevs |= r
        ancestordrevs |= a
    else:
        # binary/group operators: union the results of all operands
        for subtree in tree[1:]:
            r, a = _prefetchdrevs(subtree)
            drevs |= r
            ancestordrevs |= a
    return drevs, ancestordrevs
1747 1747
1748 1748
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
              "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
          "3",
          "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Revisions may be addressed by numeric id or by PHID; whichever was
        # supplied doubles as the cache key.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, indexed both ways so later
        # lookups by id or PHID hit the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # follow the "depends on" edges recorded by Phabricator
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # dependencies were collected top-down; present them bottom-up
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For revisions selected via
    # an ancestors operator, optimistically fetch a window of batchsize
    # preceding ids in one conduit call.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status names (e.g. "needsreview") filter the prefetched
                # set; they cannot stand alone (see phabread docstring)
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # smartset implements and_/add/sub as operators
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1874 1874
1875 1875
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    Joins the title, summary, test plan and URI of the revision into a
    commit message, skipping empty sections. This is similar to the
    differential.getcommitmessage API but only cares about these fields.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    return b'\n\n'.join(s for s in sections if s)
1889 1889
1890 1890
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    Used when determining whether an individual commit needs its message
    amended after posting it for review; the determination is made per
    commit even when several were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']
    desc = ctx.description()

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions would end up with the same commit message
    # after the URI is amended in. So only append the DREV line, or update
    # it if it already exists. At worst, commit message or test plan updates
    # on Phabricator aren't propagated back to the repository, but that
    # seems reasonable when local commits are effectively combined in
    # Phabricator.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)
    return b'\n\n'.join([desc, uri])
1916 1916
1917 1917
def getlocalcommits(diff):
    """get the set of local commits (binary nodes) from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}
    # A review folded from several commits records each of them here.
    if len(localcommits) > 1:
        return {bin(hexnode) for hexnode in localcommits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1931 1931
1932 1932
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    Two sources are recognized in the diff's ``properties``:

    - ``hg:meta``, sent by phabsend, already in the desired shape, e.g.::

        {"branch": ..., "date": ..., "node": ..., "user": ..., "parent": ...}

    - ``local:commits``, sent by "arc", converted field by field (note:
      metadata extracted from "local:commits" will lose time zone
      information).

    Fields still missing afterwards are backfilled from the diff object
    itself (``dateCreated``, ``branch``, ``sourceControlBaseRevision``).
    """
    properties = diff.get(b'properties') or {}
    meta = properties.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = properties.get(b'local:commits')
        if localcommits:
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "0" time zone offset: arc does not record one
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if len(parents) >= 1:
                meta[b'parent'] = parents[0]
    # Backfill anything still missing from the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2000 2000
2001 2001
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC. Aborts when
    no specs are given or the query matches nothing.
    """
    if specs:

        def _wrap(one):
            # parenthesize each spec; prepend ":" to pull in the whole
            # stack when --stack was requested
            if stack:
                one = b':(%s)' % one
            return b'(%s)' % one

        combined = b'+'.join(pycompat.maplist(_wrap, specs))

        drevs = querydrev(ui, combined)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
2021 2021
2022 2022
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs. Only the most recent diff of
    # each revision is used.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
2059 2059
2060 2060
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _write(patches):
        # emit each generated patch verbatim on stdout
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
2097 2097
2098 2098
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Callback handed to readpatch(); applies each patch in a single
        # transaction without touching the working directory.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # chain: the freshly imported node parents the next patch
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2158 2158
2159 2159
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # transaction types sent to differential.revision.edit; at most one of
    # the corresponding flags may be used per invocation
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
    ]
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # the --comment text is attached only to the last revision selected
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
2209 2211
2210 2212
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision:" line embedded in the commit
    # message, if present.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # Fall back to a local tag of the form D<number>, reconstructing the
        # URL from the configured Phabricator base URL.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
2233 2235
2234 2236
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        # map the local revision to its Differential Revision number
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # no associated differential for this changeset
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
2255 2257
2256 2258
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # split revisions into those with and without an associated differential
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # fetch all statuses in a single conduit round trip
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # displayer hook: print the differential URI and a colorized status
        # label for each changeset
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # drop changesets with no associated differential from the graph
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now