##// END OF EJS Templates
phabricator: fix loadhgrc() override broken by D8656...
Martin von Zweigbergk -
r46074:c7fe0dfb default
parent child Browse files
Show More
@@ -1,2312 +1,2312 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid, short
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 rewriteutil,
80 80 scmutil,
81 81 smartset,
82 82 tags,
83 83 templatefilters,
84 84 templateutil,
85 85 url as urlmod,
86 86 util,
87 87 )
88 88 from mercurial.utils import (
89 89 procutil,
90 90 stringutil,
91 91 )
92 92 from . import show
93 93
94 94
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points so Mercurial's extension
# loader picks them up from this module.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.debug
eh.configitem(
    b'phabricator', b'debug', default=False,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect styles for the phabricator-specific template labels used by
# phabsend/phabstatus output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every vcr-enabled command (see vcrcommand below);
# used by the test suite to record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
167 167
168 168
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True if any config was applied (by us or by ``orig``).

    The ``*args``/``**opts`` pass-through keeps this override working when
    the wrapped ``localrepo.loadhgrc()`` grows new parameters; everything
    beyond the ones we use is forwarded to ``orig`` untouched.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig in the working directory; nothing to load.
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return (
        orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or result
    )  # Load .hg/hgrc
207 207
208 208
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``@command``, but adds the ``--test-vcr`` flag (see
    ``_VCR_FLAGS``).  When that flag is given, all HTTP requests made while
    the command runs go through a vcr cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: same URI/method, and equivalent form bodies.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Strip the real conduit API token before it is written to the
        # cassette file.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Don't persist session cookies in the recorded transcript.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if vcr:
                cassette = pycompat.fsdecode(vcr)
                import hgdemandimport

                # vcr does not play well with demandimport; import it with
                # demandimport disabled.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # depth=2 so checksignature reports the user's function, not inner
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
288 288
289 289
290 290 def _debug(ui, *msg, **opts):
291 291 """write debug output for Phabricator if ``phabricator.debug`` is set
292 292
293 293 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
294 294 printed with the --debug argument.
295 295 """
296 296 if ui.configbool(b"phabricator", b"debug"):
297 297 flag = ui.debugflag
298 298 try:
299 299 ui.debugflag = True
300 300 ui.write(*msg, **opts)
301 301 finally:
302 302 ui.debugflag = flag
303 303
304 304
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def walk(prefix, value):
        # Python bools become PHP-style literals before flattening.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact-type dispatch (not isinstance): subclasses of list/dict are
        # treated as leaf values, matching the original behavior.
        if type(value) is list:
            children = [(b'%d' % i, v) for i, v in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, child in children:
            walk(b'%s[%s]' % (prefix, key) if prefix else key, child)

    walk(b'', params)
    return util.urlreq.urlencode(flat)
330 330
331 331
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises error.Abort if phabricator.url is unset or no matching
    [auth] entry provides a phabtoken.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Find the [auth] group whose schemes/prefix match the configured URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
360 360
361 361
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. b'differential.querydiffs').
    Raises error.Abort when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # copy before mutating: the auth token is injected in-band
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # user-configured curl command; the form data is piped via stdin
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # builtin HTTP client
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
405 405
406 406
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
430 430
431 431
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # resolve the callsign to a repository PHID via Conduit
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None
    phid = query[b'data'][0][b'phid']
    # cache the answer in the config for the rest of this process
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
451 451
452 452
# Matches a local tag of the exact form "D123" (no prefix/suffix).
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>" line in a commit message;
# captures the full URL and the trailing numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
457 457
458 458
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag still needs confirmation below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is considered authoritative
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                # Remove the bogus "D123" tag so it is not matched again.
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
584 584
585 585
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # Prefer the "Differential Revision:" line from the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            mapping[rev] = int(descmatch.group('id'))
            continue
        # Otherwise fall back to a local "D123" tag on the node, if any.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
607 607
608 608
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; labels are irrelevant here
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
617 617
618 618
class DiffChangeType(object):
    """Numeric change-type codes sent to Differential for each file."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
628 628
629 629
class DiffFileType(object):
    """Numeric file-type codes sent to Differential for each file."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
634 634
635 635
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # NOTE: attribute declaration order defines the positional-argument
    # order used by callers (see maketext).
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
649 649
650 650
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every metadata key under its b'old:'-prefixed name."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (git-style string)."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (git-style string)."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
690 690
691 691
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for this diff, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
718 718
719 719
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # context=32767 effectively asks for maximum context around each hunk
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # drop the "@@ ... @@" line; Differential gets the ranges separately
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
748 748
749 749
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by a prior file.allocate call.
    """
    ui = fctx.repo().ui
    # the server tells us which byte ranges it still needs
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # already on the server; skip
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
775 775
776 776
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # allocate returned a PHID: the server wants chunked upload
            uploadchunks(fctx, fphid)
        else:
            # small file: single-shot upload
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
812 812
813 813
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # Content is identical, so nothing to upload.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
837 837
838 838
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    # images get a dedicated type so the web UI renders a preview
    if mimeguess.startswith(b'image/'):
        pchange.fileType = DiffFileType.IMAGE
851 851
852 852
# Copied from mercurial/patch.py
# Maps a Mercurial file flag (b'l' symlink, b'x' executable, b'' regular)
# to the git-style mode string expected by Differential.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
855 855
856 856
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # warn so the user knows why the file is treated as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
870 870
871 871
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # the removed content lives in the first parent of the base context
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
884 884
885 885
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # either side being binary (or non-UTF-8) forces a binary change
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
910 910
911 911
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place: files recorded here as move sources
    are deleted from it, so the caller's subsequent addremoved() pass will
    not see them.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Find the copy/rename source, if any: from the precomputed copy map
        # when diffing a multi-commit range, otherwise from the context itself.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source is also being removed: this is a move. Record the
                # "away" side and drop the source from ``removed``.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-recorded move upgrades
                # it to a multi-way copy.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # The "away" halves are emitted after all destinations are known, so the
    # MULTICOPY upgrades above are reflected in what gets added.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
993 993
994 994
def creatediff(basectx, ctx):
    """create a Differential Diff covering basectx::ctx via the conduit API"""
    repo = ctx.repo()
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    repophid = getrepophid(repo)
    if repophid:
        pdiff.repositoryPHID = repophid

    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # Adds are processed first: addadded() pulls move sources out of
    # ``removed``, so the addremoved() pass below never sees them.
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
1024 1024
1025 1025
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    # "hg:meta" describes the range as a whole: tip commit's identity plus
    # the base's parent.
    hgmeta = {
        b'user': tipctx.user(),
        b'date': b'%d %d' % tipctx.date(),
        b'branch': tipctx.branch(),
        b'node': tipctx.hex(),
        b'parent': basectx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits" records every commit in the range individually.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(commits),
        },
    )
1069 1069
1070 1070
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit results.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Use a very large diff context so the old/new comparison is made on
        # effectively full file contents, not just a window of changed lines.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1189 1189
1190 1190
def userphids(ui, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    data = result[b'data']
    # The API silently drops unknown usernames instead of erroring, so
    # detect any that went missing ourselves.
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1206 1206
1207 1207
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with ``--fold``.
    """
    actionlabels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(
        actionlabels[action], b'phabricator.action.%s' % action
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    firstline = ctx.description().split(b'\n')[0]
    desc = ui.label(firstline, b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1226 1226
1227 1227
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = [short(n) for n in newnodes]
    _debug(unfi.ui, b"new commits: %s\n" % stringutil.pprint(shortnodes))

    try:
        writediffproperties([unfi[n] for n in newnodes], diff)
    except util.urlerr.urlerror:
        # Warn rather than abort: a failed metadata update is recoverable,
        # losing the DREV associations is not.
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1245 1245
1246 1246
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
        (b'', b'fold', False, _(b'combine the revisions into one review')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range. The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, revtype):
        """abort, naming the ``revtype`` revisions that break linearity"""
        badnodes = [repo[r].node() for r in revs]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option. (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.) It should
        # lock the repo and ensure that the range is editable, but that would
        # make the code pretty convoluted. The default behavior of `arc` is to
        # create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value, and
        # iterate over those groups when posting, and then again when amending.
        # But for simplicity, require all selected revisions to be for the same
        # DREV (if present). Adding local revisions to an existing DREV is
        # acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            # With --fold there is a single combined diff; every selected
            # commit maps to it.
            if fold:
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # With --fold, a single pass handled every commit, so print one
            # status line per commit and stop iterating.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits. The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contain "Differential Revision"
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    newdesc = rewriteutil.update_hash_refs(
                        repo, newdesc, mapping,
                    )
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected. No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs. So use the queried diff (if
                # present), or force the amend (a new revision is being posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process. The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=rewriteutil.update_hash_refs(
                        repo, old.description(), mapping
                    ),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1625 1625
1626 1626
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # the trailing space in b'Parent ' is intentional
        (b'parent', b'Parent '),
    ]
)
1638 1638
1639 1639
def _confirmbeforesend(repo, revs, oldmap):
    """list the selected revisions and interactively ask to proceed

    Returns True when the user confirms sending, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        summary = ctx.description().splitlines()[0]

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(summary, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for "Yes" (confirm) and 1 for "No"
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1667 1667
1668 1668
# Differential Revision statuses this extension recognizes, in the
# normalized form produced by _getstatusname() (lowercase, spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1677 1677
1678 1678
1679 1679 def _getstatusname(drev):
1680 1680 """get normalized status name from a Differential Revision"""
1681 1681 return drev[b'statusName'].replace(b' ', b'').lower()
1682 1682
1683 1683
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# This grammar table is consumed by parser.parser() in _parse() below.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1698 1698
1699 1699
def _tokenize(text):
    """tokenize a revision spec

    Yields ``(token-type, value, position)`` triples: symbols carry their
    bytes as the value, punctuation tokens carry None, spaces are skipped,
    and a final ``end`` token is always emitted.
    """
    special = b'():+-& '
    length = len(text)
    pos = 0
    while pos < length:
        # scan the longest run of non-special bytes starting at pos
        end = pos
        while end < length and text[end : end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:  # special char, ignore space
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1719 1719
1720 1720
def _parse(text):
    """parse a revision spec into a syntax tree, aborting on trailing junk"""
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos == len(text):
        return tree
    raise error.ParseError(b'invalid token', pos)
1726 1726
1727 1727
1728 1728 def _parsedrev(symbol):
1729 1729 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1730 1730 if symbol.startswith(b'D') and symbol[1:].isdigit():
1731 1731 return int(symbol[1:])
1732 1732 if symbol.isdigit():
1733 1733 return int(symbol)
1734 1734
1735 1735
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # an ancestors query needs its operand both as a plain drev and as
        # a stack root to walk from
        r, a = _prefetchdrevs(tree[1])
        drevs |= r
        ancestordrevs |= r | a
    else:
        for subtree in tree[1:]:
            r, a = _prefetchdrevs(subtree)
            drevs |= r
            ancestordrevs |= a
    return drevs, ancestordrevs
1756 1756
1757 1757
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "auxiliary": {
            "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
            ]
            "phabricator:projects": [],
        },
        "branch": "default",
        "ccs": [],
        "commits": [],
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "diffs": [
            "3",
            "4",
        ],
        "hashes": [],
        "id": "2",
        "lineCount": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "properties": {},
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "reviewers": [],
        "sourcePath": null
        "status": "0",
        "statusName": "Needs Review",
        "summary": "",
        "testPlan": "",
        "title": "example",
        "uri": "https://phab.example.com/D2",
    }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # The cache key is whatever identifier the caller used: a numeric
        # id (b'ids') or a PHID (b'phids').
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result; index each drev under both its
        # PHID and its integer id so later lookups hit either way.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends on" edges towards the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # Traversal above goes top-down; callers expect bottom-up order.
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For each ancestor root,
    # guess its stack lives in the "batchsize" ids below it so one batched
    # query usually warms the cache for the whole getstack() walk.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status-name symbols filter the already-resolved ids;
                # they cannot select revisions on their own.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1883 1883
1884 1884
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        b'Test Plan:\n%s' % testplan if testplan else b'',
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Skip empty sections so no stray blank paragraphs are emitted.
    return b'\n\n'.join(s for s in sections if s)
1898 1898
1899 1899
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    description = ctx.description()
    if _differentialrevisiondescre.search(description):
        return _differentialrevisiondescre.sub(uri, description)
    return b'\n\n'.join([description, uri])
1925 1925
1926 1926
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    local = properties.get(b'local:commits') or {}
    # Multiple entries only occur for reviews posted with --fold; each key
    # is a hex changeset hash.
    if len(local) > 1:
        return {bin(node) for node in local}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1940 1940
1941 1941
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "branch": "default",
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "authorEmail": "foo@example.com"
          "branch": "default",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "message": "...",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "summary": "...",
          "tag": "",
          "time": 1499546314,
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # No phabsend metadata; reconstruct what we can from the data
            # "arc" sends. Only one commit is expected here.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is lost; assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # Fill remaining gaps from top-level diff fields, without overriding
    # anything the properties already provided.
    if b'dateCreated' in diff:
        meta.setdefault(b'date', b'%s 0' % diff[b'dateCreated'])
    if diff.get(b'branch'):
        meta.setdefault(b'branch', diff[b'branch'])
    if diff.get(b'sourceControlBaseRevision'):
        meta.setdefault(b'parent', diff[b'sourceControlBaseRevision'])
    return meta
2009 2009
2010 2010
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _formatspec(s):
            # ":(spec)" extends the selection to the whole stack below it.
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        # The union of all individually evaluated specs.
        spec = b'+'.join(_formatspec(s) for s in specs)

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
2030 2030
2031 2031
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs in one conduit round-trip.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # The latest diff is the one with the highest id.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for key in _metanamemap:
            if key in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[key], meta[key]))
        header = b'\n'.join(headerlines) + b'\n'

        patches.append((drev[b'id'], b'%s%s\n%s' % (header, desc, body)))

    # Write patches to the supplied callback
    write(patches)
2068 2068
2069 2069
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # Print each patch to stdout in the order readpatch() produced them.
    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
2106 2106
2107 2107
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    # Callback invoked by readpatch() with the fetched patch texts; commits
    # them one after another inside a single transaction.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # Chain: the next patch applies on top of the commit just made.
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2167 2167
2168 2168
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'request-review', False, _(b'request review on revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'', b'close', False, _(b'close revisions')),
        (b'', b'reopen', False, _(b'reopen revisions')),
        (b'', b'plan-changes', False, _(b'plan changes for revisions')),
        (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
        (b'', b'commandeer', False, _(b'commandeer revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # Transaction types accepted by differential.revision.edit; at most one
    # of the corresponding flags may be given per invocation.
    transactions = [
        b'abandon',
        b'accept',
        b'close',
        b'commandeer',
        b'plan-changes',
        b'reclaim',
        b'reject',
        b'reopen',
        b'request-review',
        b'resign',
    ]
    # CLI flags use "-" while the opts dict uses "_"; map back to find the
    # flags that were actually set.
    flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # The --comment text is attached only to the last selected revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
2224 2224
2225 2225
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        # The commit message names the review directly.
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )

    # Otherwise look for a local "D123"-style tag left behind by phabsend.
    for t in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(t):
            url = ctx.repo().ui.config(b'phabricator', b'url')
            if not url.endswith(b'/'):
                url += b'/'
            return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
2248 2248
2249 2249
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        # No Differential Revision is associated with this changeset.
        return None
    # Query the server and pick the entry matching our revision id.
    for drev in callconduit(ui, b'differential.query', {b'ids': [drevid]}):
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
2270 2270
2271 2271
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Split revisions into those with an associated differential (indexed
    # both ways) and those without.
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One batched conduit query for all known differential ids.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Hook called by the displayer per changeset: prints the differential
    # URI and its colorized status below the log entry.
    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph changesets that actually have a differential.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now