phabricator: extract the logic to amend diff properties to a function...
Matt Harbison
r45137:99fa161a default
@@ -1,2055 +1,2062 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use the built-in HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
94 94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 96 # be specifying the version(s) of Mercurial they are tested with, or
97 97 # leave the attribute unspecified.
98 98 testedwith = b'ships-with-hg-core'
99 99
100 100 eh = exthelper.exthelper()
101 101
102 102 cmdtable = eh.cmdtable
103 103 command = eh.command
104 104 configtable = eh.configtable
105 105 templatekeyword = eh.templatekeyword
106 106 uisetup = eh.finaluisetup
107 107
108 108 # developer config: phabricator.batchsize
109 109 eh.configitem(
110 110 b'phabricator', b'batchsize', default=12,
111 111 )
112 112 eh.configitem(
113 113 b'phabricator', b'callsign', default=None,
114 114 )
115 115 eh.configitem(
116 116 b'phabricator', b'curlcmd', default=None,
117 117 )
118 118 # developer config: phabricator.repophid
119 119 eh.configitem(
120 120 b'phabricator', b'repophid', default=None,
121 121 )
122 122 eh.configitem(
123 123 b'phabricator', b'url', default=None,
124 124 )
125 125 eh.configitem(
126 126 b'phabsend', b'confirm', default=False,
127 127 )
128 128 eh.configitem(
129 129 b'phabimport', b'secret', default=False,
130 130 )
131 131 eh.configitem(
132 132 b'phabimport', b'obsolete', default=False,
133 133 )
134 134
135 135 colortable = {
136 136 b'phabricator.action.created': b'green',
137 137 b'phabricator.action.skipped': b'magenta',
138 138 b'phabricator.action.updated': b'magenta',
139 139 b'phabricator.desc': b'',
140 140 b'phabricator.drev': b'bold',
141 141 b'phabricator.node': b'',
142 142 b'phabricator.status.abandoned': b'magenta dim',
143 143 b'phabricator.status.accepted': b'green bold',
144 144 b'phabricator.status.closed': b'green',
145 145 b'phabricator.status.needsreview': b'yellow',
146 146 b'phabricator.status.needsrevision': b'red',
147 147 b'phabricator.status.changesplanned': b'red',
148 148 }
149 149
150 150 _VCR_FLAGS = [
151 151 (
152 152 b'',
153 153 b'test-vcr',
154 154 b'',
155 155 _(
156 156 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
157 157 b', otherwise will mock all http requests using the specified vcr file.'
158 158 b' (ADVANCED)'
159 159 ),
160 160 ),
161 161 ]
162 162
163 163
164 164 @eh.wrapfunction(localrepo, "loadhgrc")
165 165 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
166 166 """Load ``.arcconfig`` content into a ui instance on repository open.
167 167 """
168 168 result = False
169 169 arcconfig = {}
170 170
171 171 try:
172 172 # json.loads only accepts bytes from 3.6+
173 173 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
174 174 # json.loads only returns unicode strings
175 175 arcconfig = pycompat.rapply(
176 176 lambda x: encoding.unitolocal(x)
177 177 if isinstance(x, pycompat.unicode)
178 178 else x,
179 179 pycompat.json_loads(rawparams),
180 180 )
181 181
182 182 result = True
183 183 except ValueError:
184 184 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
185 185 except IOError:
186 186 pass
187 187
188 188 cfg = util.sortdict()
189 189
190 190 if b"repository.callsign" in arcconfig:
191 191 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
192 192
193 193 if b"phabricator.uri" in arcconfig:
194 194 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
195 195
196 196 if cfg:
197 197 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
198 198
199 199 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
200 200
201 201
202 202 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
203 203 fullflags = flags + _VCR_FLAGS
204 204
205 205 def hgmatcher(r1, r2):
206 206 if r1.uri != r2.uri or r1.method != r2.method:
207 207 return False
208 208 r1params = util.urlreq.parseqs(r1.body)
209 209 r2params = util.urlreq.parseqs(r2.body)
210 210 for key in r1params:
211 211 if key not in r2params:
212 212 return False
213 213 value = r1params[key][0]
214 214 # we want to compare json payloads without worrying about ordering
215 215 if value.startswith(b'{') and value.endswith(b'}'):
216 216 r1json = pycompat.json_loads(value)
217 217 r2json = pycompat.json_loads(r2params[key][0])
218 218 if r1json != r2json:
219 219 return False
220 220 elif r2params[key][0] != value:
221 221 return False
222 222 return True
223 223
224 224 def sanitiserequest(request):
225 225 request.body = re.sub(
226 226 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
227 227 )
228 228 return request
229 229
230 230 def sanitiseresponse(response):
231 231 if 'set-cookie' in response['headers']:
232 232 del response['headers']['set-cookie']
233 233 return response
234 234
235 235 def decorate(fn):
236 236 def inner(*args, **kwargs):
237 237 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
238 238 if cassette:
239 239 import hgdemandimport
240 240
241 241 with hgdemandimport.deactivated():
242 242 import vcr as vcrmod
243 243 import vcr.stubs as stubs
244 244
245 245 vcr = vcrmod.VCR(
246 246 serializer='json',
247 247 before_record_request=sanitiserequest,
248 248 before_record_response=sanitiseresponse,
249 249 custom_patches=[
250 250 (
251 251 urlmod,
252 252 'httpconnection',
253 253 stubs.VCRHTTPConnection,
254 254 ),
255 255 (
256 256 urlmod,
257 257 'httpsconnection',
258 258 stubs.VCRHTTPSConnection,
259 259 ),
260 260 ],
261 261 )
262 262 vcr.register_matcher('hgmatcher', hgmatcher)
263 263 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
264 264 return fn(*args, **kwargs)
265 265 return fn(*args, **kwargs)
266 266
267 267 cmd = util.checksignature(inner, depth=2)
268 268 cmd.__name__ = fn.__name__
269 269 cmd.__doc__ = fn.__doc__
270 270
271 271 return command(
272 272 name,
273 273 fullflags,
274 274 spec,
275 275 helpcategory=helpcategory,
276 276 optionalrepo=optionalrepo,
277 277 )(cmd)
278 278
279 279 return decorate
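# For illustration: commands wrapped with vcrcommand() gain the --test-vcr PATH
# flag declared in _VCR_FLAGS above; if the given file does not exist, a new
# transcript is recorded, otherwise all HTTP requests are mocked from it via the
# vcr library (test-only, ADVANCED).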
280 280
281 281
282 282 def urlencodenested(params):
283 283 """like urlencode, but works with nested parameters.
284 284
285 285 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
286 286 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
287 287 urlencode. Note: the encoding is consistent with PHP's http_build_query.
288 288 """
289 289 flatparams = util.sortdict()
290 290
291 291 def process(prefix, obj):
292 292 if isinstance(obj, bool):
293 293 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
294 294 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
295 295 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
296 296 if items is None:
297 297 flatparams[prefix] = obj
298 298 else:
299 299 for k, v in items(obj):
300 300 if prefix:
301 301 process(b'%s[%s]' % (prefix, k), v)
302 302 else:
303 303 process(k, v)
304 304
305 305 process(b'', params)
306 306 return util.urlreq.urlencode(flatparams)
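# For illustration: process() converts booleans before flattening, so a value of
# True goes on the wire as the literal string b'true' (matching PHP form
# encoding), and list items are keyed by index, e.g. a[0], a[1] as in the
# docstring above.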
307 307
308 308
309 309 def readurltoken(ui):
310 310 """return conduit url, token and make sure they exist
311 311
312 312 Currently read from [auth] config section. In the future, it might
313 313 make sense to read from .arcconfig and .arcrc as well.
314 314 """
315 315 url = ui.config(b'phabricator', b'url')
316 316 if not url:
317 317 raise error.Abort(
318 318 _(b'config %s.%s is required') % (b'phabricator', b'url')
319 319 )
320 320
321 321 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
322 322 token = None
323 323
324 324 if res:
325 325 group, auth = res
326 326
327 327 ui.debug(b"using auth.%s.* for authentication\n" % group)
328 328
329 329 token = auth.get(b'phabtoken')
330 330
331 331 if not token:
332 332 raise error.Abort(
333 333 _(b'Can\'t find conduit token associated to %s') % (url,)
334 334 )
335 335
336 336 return url, token
337 337
338 338
339 339 def callconduit(ui, name, params):
340 340 """call Conduit API, params is a dict. return json.loads result, or None"""
341 341 host, token = readurltoken(ui)
342 342 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
343 343 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
344 344 params = params.copy()
345 345 params[b'__conduit__'] = {
346 346 b'token': token,
347 347 }
348 348 rawdata = {
349 349 b'params': templatefilters.json(params),
350 350 b'output': b'json',
351 351 b'__conduit__': 1,
352 352 }
353 353 data = urlencodenested(rawdata)
354 354 curlcmd = ui.config(b'phabricator', b'curlcmd')
355 355 if curlcmd:
356 356 sin, sout = procutil.popen2(
357 357 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
358 358 )
359 359 sin.write(data)
360 360 sin.close()
361 361 body = sout.read()
362 362 else:
363 363 urlopener = urlmod.opener(ui, authinfo)
364 364 request = util.urlreq.request(pycompat.strurl(url), data=data)
365 365 with contextlib.closing(urlopener.open(request)) as rsp:
366 366 body = rsp.read()
367 367 ui.debug(b'Conduit Response: %s\n' % body)
368 368 parsed = pycompat.rapply(
369 369 lambda x: encoding.unitolocal(x)
370 370 if isinstance(x, pycompat.unicode)
371 371 else x,
372 372 # json.loads only accepts bytes from py3.6+
373 373 pycompat.json_loads(encoding.unifromlocal(body)),
374 374 )
375 375 if parsed.get(b'error_code'):
376 376 msg = _(b'Conduit Error (%s): %s') % (
377 377 parsed[b'error_code'],
378 378 parsed[b'error_info'],
379 379 )
380 380 raise error.Abort(msg)
381 381 return parsed[b'result']
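# For illustration, callers typically invoke this as, e.g.,
#   callconduit(ui, b'differential.query', {b'ids': [123]})
# which POSTs the encoded parameters to <phabricator.url>/api/differential.query
# and returns the decoded b'result' payload (123 is only a placeholder id).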
382 382
383 383
384 384 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
385 385 def debugcallconduit(ui, repo, name):
386 386 """call Conduit API
387 387
388 388 Call parameters are read from stdin as a JSON blob. Result will be written
389 389 to stdout as a JSON blob.
390 390 """
391 391 # json.loads only accepts bytes from 3.6+
392 392 rawparams = encoding.unifromlocal(ui.fin.read())
393 393 # json.loads only returns unicode strings
394 394 params = pycompat.rapply(
395 395 lambda x: encoding.unitolocal(x)
396 396 if isinstance(x, pycompat.unicode)
397 397 else x,
398 398 pycompat.json_loads(rawparams),
399 399 )
400 400 # json.dumps only accepts unicode strings
401 401 result = pycompat.rapply(
402 402 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
403 403 callconduit(ui, name, params),
404 404 )
405 405 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
406 406 ui.write(b'%s\n' % encoding.unitolocal(s))
407 407
408 408
409 409 def getrepophid(repo):
410 410 """given callsign, return repository PHID or None"""
411 411 # developer config: phabricator.repophid
412 412 repophid = repo.ui.config(b'phabricator', b'repophid')
413 413 if repophid:
414 414 return repophid
415 415 callsign = repo.ui.config(b'phabricator', b'callsign')
416 416 if not callsign:
417 417 return None
418 418 query = callconduit(
419 419 repo.ui,
420 420 b'diffusion.repository.search',
421 421 {b'constraints': {b'callsigns': [callsign]}},
422 422 )
423 423 if len(query[b'data']) == 0:
424 424 return None
425 425 repophid = query[b'data'][0][b'phid']
426 426 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
427 427 return repophid
428 428
429 429
430 430 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
431 431 _differentialrevisiondescre = re.compile(
432 432 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
433 433 )
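# For illustration: _differentialrevisiontagre matches local tags such as
# b'D1234', while _differentialrevisiondescre matches a trailing commit message
# line of the form
#   Differential Revision: https://phab.example.com/D1234
# capturing the URL and the numeric id (the host is only an example here).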
434 434
435 435
436 436 def getoldnodedrevmap(repo, nodelist):
437 437 """find previous nodes that have been sent to Phabricator
438 438
439 439 return {node: (oldnode, Differential diff, Differential Revision ID)}
440 440 for node in nodelist with known previously sent versions, or associated
441 441 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
442 442 be ``None``.
443 443
444 444 Examines commit messages like "Differential Revision:" to get the
445 445 association information.
446 446
447 447 If no such commit message line is found, examine all precursors and their
448 448 tags. Tags in a format like "D1234" are considered a match, and the node
449 449 with that tag and the number after "D" (e.g. 1234) will be returned.
450 450
451 451 The ``old node``, if not None, is guaranteed to be the last diff of the
452 452 corresponding Differential Revision, and to exist in the repo.
453 453 """
454 454 unfi = repo.unfiltered()
455 455 has_node = unfi.changelog.index.has_node
456 456
457 457 result = {} # {node: (oldnode?, lastdiff?, drev)}
458 458 toconfirm = {} # {node: (force, {precnode}, drev)}
459 459 for node in nodelist:
460 460 ctx = unfi[node]
461 461 # For tags like "D123", put them into "toconfirm" to verify later
462 462 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
463 463 for n in precnodes:
464 464 if has_node(n):
465 465 for tag in unfi.nodetags(n):
466 466 m = _differentialrevisiontagre.match(tag)
467 467 if m:
468 468 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
469 469 break
470 470 else:
471 471 continue # move to next predecessor
472 472 break # found a tag, stop
473 473 else:
474 474 # Check commit message
475 475 m = _differentialrevisiondescre.search(ctx.description())
476 476 if m:
477 477 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
478 478
479 479 # Double-check that the tags are genuine by collecting all old nodes from
480 480 # Phabricator, and expecting the precursors to overlap with them.
481 481 if toconfirm:
482 482 drevs = [drev for force, precs, drev in toconfirm.values()]
483 483 alldiffs = callconduit(
484 484 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
485 485 )
486 486
487 487 def getnodes(d, precset):
488 488 # Ignore other nodes that were combined into the Differential
489 489 # that aren't predecessors of the current local node.
490 490 return [n for n in getlocalcommits(d) if n in precset]
491 491
492 492 for newnode, (force, precset, drev) in toconfirm.items():
493 493 diffs = [
494 494 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
495 495 ]
496 496
497 497 # local predecessors known by Phabricator
498 498 phprecset = {n for d in diffs for n in getnodes(d, precset)}
499 499
500 500 # Ignore if precursors (Phabricator and local repo) do not overlap,
501 501 # and force is not set (when commit message says nothing)
502 502 if not force and not phprecset:
503 503 tagname = b'D%d' % drev
504 504 tags.tag(
505 505 repo,
506 506 tagname,
507 507 nullid,
508 508 message=None,
509 509 user=None,
510 510 date=None,
511 511 local=True,
512 512 )
513 513 unfi.ui.warn(
514 514 _(
515 515 b'D%d: local tag removed - does not match '
516 516 b'Differential history\n'
517 517 )
518 518 % drev
519 519 )
520 520 continue
521 521
522 522 # Find the last node using Phabricator metadata, and make sure it
523 523 # exists in the repo
524 524 oldnode = lastdiff = None
525 525 if diffs:
526 526 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
527 527 oldnodes = getnodes(lastdiff, precset)
528 528
529 529 # If this commit was the result of `hg fold` after submission,
530 530 # and now resubmitted with --fold, the easiest thing to do is
531 531 # to leave the node clear. This only results in creating a new
532 532 # diff for the _same_ Differential Revision if this commit is
533 533 # the first or last in the selected range.
534 534 # If this commit is the result of `hg split` in the same
535 535 # scenario, there is a single oldnode here (and multiple
536 536 # newnodes mapped to it). That makes it the same as the normal
537 537 # case, as the edges of the newnode range cleanly map to one
538 538 # oldnode each.
539 539 if len(oldnodes) == 1:
540 540 oldnode = oldnodes[0]
541 541 if oldnode and not has_node(oldnode):
542 542 oldnode = None
543 543
544 544 result[newnode] = (oldnode, lastdiff, drev)
545 545
546 546 return result
547 547
548 548
549 549 def getdrevmap(repo, revs):
550 550 """Return a dict mapping each rev in `revs` to its Differential Revision
551 551 ID or None.
552 552 """
553 553 result = {}
554 554 for rev in revs:
555 555 result[rev] = None
556 556 ctx = repo[rev]
557 557 # Check commit message
558 558 m = _differentialrevisiondescre.search(ctx.description())
559 559 if m:
560 560 result[rev] = int(m.group('id'))
561 561 continue
562 562 # Check tags
563 563 for tag in repo.nodetags(ctx.node()):
564 564 m = _differentialrevisiontagre.match(tag)
565 565 if m:
566 566 result[rev] = int(m.group(1))
567 567 break
568 568
569 569 return result
570 570
571 571
572 572 def getdiff(basectx, ctx, diffopts):
573 573 """plain-text diff without header (user, commit message, etc)"""
574 574 output = util.stringio()
575 575 for chunk, _label in patch.diffui(
576 576 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
577 577 ):
578 578 output.write(chunk)
579 579 return output.getvalue()
580 580
581 581
582 582 class DiffChangeType(object):
583 583 ADD = 1
584 584 CHANGE = 2
585 585 DELETE = 3
586 586 MOVE_AWAY = 4
587 587 COPY_AWAY = 5
588 588 MOVE_HERE = 6
589 589 COPY_HERE = 7
590 590 MULTICOPY = 8
591 591
592 592
593 593 class DiffFileType(object):
594 594 TEXT = 1
595 595 IMAGE = 2
596 596 BINARY = 3
597 597
598 598
599 599 @attr.s
600 600 class phabhunk(dict):
601 601 """Represents a Differential hunk, which is owned by a Differential change
602 602 """
603 603
604 604 oldOffset = attr.ib(default=0) # camelcase-required
605 605 oldLength = attr.ib(default=0) # camelcase-required
606 606 newOffset = attr.ib(default=0) # camelcase-required
607 607 newLength = attr.ib(default=0) # camelcase-required
608 608 corpus = attr.ib(default='')
609 609 # These get added to the phabchange's equivalents
610 610 addLines = attr.ib(default=0) # camelcase-required
611 611 delLines = attr.ib(default=0) # camelcase-required
612 612
613 613
614 614 @attr.s
615 615 class phabchange(object):
616 616 """Represents a Differential change, owns Differential hunks and owned by a
617 617 Differential diff. Each one represents one file in a diff.
618 618 """
619 619
620 620 currentPath = attr.ib(default=None) # camelcase-required
621 621 oldPath = attr.ib(default=None) # camelcase-required
622 622 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
623 623 metadata = attr.ib(default=attr.Factory(dict))
624 624 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
625 625 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
626 626 type = attr.ib(default=DiffChangeType.CHANGE)
627 627 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
628 628 commitHash = attr.ib(default=None) # camelcase-required
629 629 addLines = attr.ib(default=0) # camelcase-required
630 630 delLines = attr.ib(default=0) # camelcase-required
631 631 hunks = attr.ib(default=attr.Factory(list))
632 632
633 633 def copynewmetadatatoold(self):
634 634 for key in list(self.metadata.keys()):
635 635 newkey = key.replace(b'new:', b'old:')
636 636 self.metadata[newkey] = self.metadata[key]
637 637
638 638 def addoldmode(self, value):
639 639 self.oldProperties[b'unix:filemode'] = value
640 640
641 641 def addnewmode(self, value):
642 642 self.newProperties[b'unix:filemode'] = value
643 643
644 644 def addhunk(self, hunk):
645 645 if not isinstance(hunk, phabhunk):
646 646 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
647 647 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
648 648 # It's useful to include these stats since the Phab web UI shows them,
649 649 # and uses them to estimate how large a change a Revision is. Also used
650 650 # in email subjects for the [+++--] bit.
651 651 self.addLines += hunk.addLines
652 652 self.delLines += hunk.delLines
653 653
654 654
655 655 @attr.s
656 656 class phabdiff(object):
657 657 """Represents a Differential diff, owns Differential changes. Corresponds
658 658 to a commit.
659 659 """
660 660
661 661 # Doesn't seem to be any reason to send this (output of uname -n)
662 662 sourceMachine = attr.ib(default=b'') # camelcase-required
663 663 sourcePath = attr.ib(default=b'/') # camelcase-required
664 664 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
665 665 sourceControlPath = attr.ib(default=b'/') # camelcase-required
666 666 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
667 667 branch = attr.ib(default=b'default')
668 668 bookmark = attr.ib(default=None)
669 669 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
670 670 lintStatus = attr.ib(default=b'none') # camelcase-required
671 671 unitStatus = attr.ib(default=b'none') # camelcase-required
672 672 changes = attr.ib(default=attr.Factory(dict))
673 673 repositoryPHID = attr.ib(default=None) # camelcase-required
674 674
675 675 def addchange(self, change):
676 676 if not isinstance(change, phabchange):
677 677 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
678 678 self.changes[change.currentPath] = pycompat.byteskwargs(
679 679 attr.asdict(change)
680 680 )
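# For illustration: phabhunk, phabchange and phabdiff mirror the nested payload
# that creatediff() below sends to the differential.creatediff API via
# pycompat.byteskwargs(attr.asdict(pdiff)); the "camelcase-required" fields keep
# the exact attribute names that Conduit expects.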
681 681
682 682
683 683 def maketext(pchange, basectx, ctx, fname):
684 684 """populate the phabchange for a text file"""
685 685 repo = ctx.repo()
686 686 fmatcher = match.exact([fname])
687 687 diffopts = mdiff.diffopts(git=True, context=32767)
688 688 _pfctx, _fctx, header, fhunks = next(
689 689 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
690 690 )
691 691
692 692 for fhunk in fhunks:
693 693 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
694 694 corpus = b''.join(lines[1:])
695 695 shunk = list(header)
696 696 shunk.extend(lines)
697 697 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
698 698 patch.diffstatdata(util.iterlines(shunk))
699 699 )
700 700 pchange.addhunk(
701 701 phabhunk(
702 702 oldOffset,
703 703 oldLength,
704 704 newOffset,
705 705 newLength,
706 706 corpus,
707 707 addLines,
708 708 delLines,
709 709 )
710 710 )
711 711
712 712
713 713 def uploadchunks(fctx, fphid):
714 714 """upload large binary files as separate chunks.
715 715 Phab requests chunking for files over 8MiB, and splits them into 4MiB chunks
716 716 """
717 717 ui = fctx.repo().ui
718 718 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
719 719 with ui.makeprogress(
720 720 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
721 721 ) as progress:
722 722 for chunk in chunks:
723 723 progress.increment()
724 724 if chunk[b'complete']:
725 725 continue
726 726 bstart = int(chunk[b'byteStart'])
727 727 bend = int(chunk[b'byteEnd'])
728 728 callconduit(
729 729 ui,
730 730 b'file.uploadchunk',
731 731 {
732 732 b'filePHID': fphid,
733 733 b'byteStart': bstart,
734 734 b'data': base64.b64encode(fctx.data()[bstart:bend]),
735 735 b'dataEncoding': b'base64',
736 736 },
737 737 )
738 738
739 739
740 740 def uploadfile(fctx):
741 741 """upload binary files to Phabricator"""
742 742 repo = fctx.repo()
743 743 ui = repo.ui
744 744 fname = fctx.path()
745 745 size = fctx.size()
746 746 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
747 747
748 748 # an allocate call is required first to see if an upload is even required
749 749 # (Phab might already have it) and to determine if chunking is needed
750 750 allocateparams = {
751 751 b'name': fname,
752 752 b'contentLength': size,
753 753 b'contentHash': fhash,
754 754 }
755 755 filealloc = callconduit(ui, b'file.allocate', allocateparams)
756 756 fphid = filealloc[b'filePHID']
757 757
758 758 if filealloc[b'upload']:
759 759 ui.write(_(b'uploading %s\n') % bytes(fctx))
760 760 if not fphid:
761 761 uploadparams = {
762 762 b'name': fname,
763 763 b'data_base64': base64.b64encode(fctx.data()),
764 764 }
765 765 fphid = callconduit(ui, b'file.upload', uploadparams)
766 766 else:
767 767 uploadchunks(fctx, fphid)
768 768 else:
769 769 ui.debug(b'server already has %s\n' % bytes(fctx))
770 770
771 771 if not fphid:
772 772 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
773 773
774 774 return fphid
775 775
776 776
777 777 def addoldbinary(pchange, oldfctx, fctx):
778 778 """add the metadata for the previous version of a binary file to the
779 779 phabchange for the new version
780 780
781 781 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
782 782 version of the file, or None if the file is being removed.
783 783 """
784 784 if not fctx or fctx.cmp(oldfctx):
785 785 # Files differ, add the old one
786 786 pchange.metadata[b'old:file:size'] = oldfctx.size()
787 787 mimeguess, _enc = mimetypes.guess_type(
788 788 encoding.unifromlocal(oldfctx.path())
789 789 )
790 790 if mimeguess:
791 791 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
792 792 mimeguess
793 793 )
794 794 fphid = uploadfile(oldfctx)
795 795 pchange.metadata[b'old:binary-phid'] = fphid
796 796 else:
797 797 # If it's left as IMAGE/BINARY, the web UI might try to display it
798 798 pchange.fileType = DiffFileType.TEXT
799 799 pchange.copynewmetadatatoold()
800 800
801 801
802 802 def makebinary(pchange, fctx):
803 803 """populate the phabchange for a binary file"""
804 804 pchange.fileType = DiffFileType.BINARY
805 805 fphid = uploadfile(fctx)
806 806 pchange.metadata[b'new:binary-phid'] = fphid
807 807 pchange.metadata[b'new:file:size'] = fctx.size()
808 808 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
809 809 if mimeguess:
810 810 mimeguess = pycompat.bytestr(mimeguess)
811 811 pchange.metadata[b'new:file:mime-type'] = mimeguess
812 812 if mimeguess.startswith(b'image/'):
813 813 pchange.fileType = DiffFileType.IMAGE
814 814
815 815
816 816 # Copied from mercurial/patch.py
817 817 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
818 818
819 819
820 820 def notutf8(fctx):
821 821 """detect non-UTF-8 text files since Phabricator requires them to be marked
822 822 as binary
823 823 """
824 824 try:
825 825 fctx.data().decode('utf-8')
826 826 return False
827 827 except UnicodeDecodeError:
828 828 fctx.repo().ui.write(
829 829 _(b'file %s detected as non-UTF-8, marked as binary\n')
830 830 % fctx.path()
831 831 )
832 832 return True
833 833
834 834
835 835 def addremoved(pdiff, basectx, ctx, removed):
836 836 """add removed files to the phabdiff. Shouldn't include moves"""
837 837 for fname in removed:
838 838 pchange = phabchange(
839 839 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
840 840 )
841 841 oldfctx = basectx.p1()[fname]
842 842 pchange.addoldmode(gitmode[oldfctx.flags()])
843 843 if not (oldfctx.isbinary() or notutf8(oldfctx)):
844 844 maketext(pchange, basectx, ctx, fname)
845 845
846 846 pdiff.addchange(pchange)
847 847
848 848
849 849 def addmodified(pdiff, basectx, ctx, modified):
850 850 """add modified files to the phabdiff"""
851 851 for fname in modified:
852 852 fctx = ctx[fname]
853 853 oldfctx = basectx.p1()[fname]
854 854 pchange = phabchange(currentPath=fname, oldPath=fname)
855 855 filemode = gitmode[fctx.flags()]
856 856 originalmode = gitmode[oldfctx.flags()]
857 857 if filemode != originalmode:
858 858 pchange.addoldmode(originalmode)
859 859 pchange.addnewmode(filemode)
860 860
861 861 if (
862 862 fctx.isbinary()
863 863 or notutf8(fctx)
864 864 or oldfctx.isbinary()
865 865 or notutf8(oldfctx)
866 866 ):
867 867 makebinary(pchange, fctx)
868 868 addoldbinary(pchange, oldfctx, fctx)
869 869 else:
870 870 maketext(pchange, basectx, ctx, fname)
871 871
872 872 pdiff.addchange(pchange)
873 873
874 874
875 875 def addadded(pdiff, basectx, ctx, added, removed):
876 876 """add file adds to the phabdiff, both new files and copies/moves"""
877 877 # Keep track of files that've been recorded as moved/copied, so if there are
878 878 # additional copies we can mark them (moves get removed from removed)
879 879 copiedchanges = {}
880 880 movedchanges = {}
881 881
882 882 copy = {}
883 883 if basectx != ctx:
884 884 copy = copies.pathcopies(basectx.p1(), ctx)
885 885
886 886 for fname in added:
887 887 fctx = ctx[fname]
888 888 oldfctx = None
889 889 pchange = phabchange(currentPath=fname)
890 890
891 891 filemode = gitmode[fctx.flags()]
892 892
893 893 if copy:
894 894 originalfname = copy.get(fname, fname)
895 895 else:
896 896 originalfname = fname
897 897 if fctx.renamed():
898 898 originalfname = fctx.renamed()[0]
899 899
900 900 renamed = fname != originalfname
901 901
902 902 if renamed:
903 903 oldfctx = basectx.p1()[originalfname]
904 904 originalmode = gitmode[oldfctx.flags()]
905 905 pchange.oldPath = originalfname
906 906
907 907 if originalfname in removed:
908 908 origpchange = phabchange(
909 909 currentPath=originalfname,
910 910 oldPath=originalfname,
911 911 type=DiffChangeType.MOVE_AWAY,
912 912 awayPaths=[fname],
913 913 )
914 914 movedchanges[originalfname] = origpchange
915 915 removed.remove(originalfname)
916 916 pchange.type = DiffChangeType.MOVE_HERE
917 917 elif originalfname in movedchanges:
918 918 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
919 919 movedchanges[originalfname].awayPaths.append(fname)
920 920 pchange.type = DiffChangeType.COPY_HERE
921 921 else: # pure copy
922 922 if originalfname not in copiedchanges:
923 923 origpchange = phabchange(
924 924 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
925 925 )
926 926 copiedchanges[originalfname] = origpchange
927 927 else:
928 928 origpchange = copiedchanges[originalfname]
929 929 origpchange.awayPaths.append(fname)
930 930 pchange.type = DiffChangeType.COPY_HERE
931 931
932 932 if filemode != originalmode:
933 933 pchange.addoldmode(originalmode)
934 934 pchange.addnewmode(filemode)
935 935 else: # Brand-new file
936 936 pchange.addnewmode(gitmode[fctx.flags()])
937 937 pchange.type = DiffChangeType.ADD
938 938
939 939 if (
940 940 fctx.isbinary()
941 941 or notutf8(fctx)
942 942 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
943 943 ):
944 944 makebinary(pchange, fctx)
945 945 if renamed:
946 946 addoldbinary(pchange, oldfctx, fctx)
947 947 else:
948 948 maketext(pchange, basectx, ctx, fname)
949 949
950 950 pdiff.addchange(pchange)
951 951
952 952 for _path, copiedchange in copiedchanges.items():
953 953 pdiff.addchange(copiedchange)
954 954 for _path, movedchange in movedchanges.items():
955 955 pdiff.addchange(movedchange)
956 956
957 957
958 958 def creatediff(basectx, ctx):
959 959 """create a Differential Diff"""
960 960 repo = ctx.repo()
961 961 repophid = getrepophid(repo)
962 962 # Create a "Differential Diff" via "differential.creatediff" API
963 963 pdiff = phabdiff(
964 964 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
965 965 branch=b'%s' % ctx.branch(),
966 966 )
967 967 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
968 968 # addadded will remove moved files from removed, so addremoved won't get
969 969 # them
970 970 addadded(pdiff, basectx, ctx, added, removed)
971 971 addmodified(pdiff, basectx, ctx, modified)
972 972 addremoved(pdiff, basectx, ctx, removed)
973 973 if repophid:
974 974 pdiff.repositoryPHID = repophid
975 975 diff = callconduit(
976 976 repo.ui,
977 977 b'differential.creatediff',
978 978 pycompat.byteskwargs(attr.asdict(pdiff)),
979 979 )
980 980 if not diff:
981 981 if basectx != ctx:
982 982 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
983 983 else:
984 984 msg = _(b'cannot create diff for %s') % ctx
985 985 raise error.Abort(msg)
986 986 return diff
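# For illustration: the dict returned by differential.creatediff is consumed
# below via diff[b'phid'] (attached to the b'update' transaction in
# createdifferentialrevision) and diff[b'diffid'] (read by writediffproperties,
# falling back to b'id' for diffs fetched through differential.querydiffs).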
987 987
988 988
989 989 def writediffproperties(ctxs, diff):
990 990 """write metadata to the diff so patches can be applied losslessly
991 991
992 992 ``ctxs`` is the list of commits that created the diff, in ascending order.
993 993 The list is generally a single commit, but may be several when using
994 994 ``phabsend --fold``.
995 995 """
996 996 # creatediff returns a diffid, but query returns an id
997 997 diffid = diff.get(b'diffid', diff.get(b'id'))
998 998 basectx = ctxs[0]
999 999 tipctx = ctxs[-1]
1000 1000
1001 1001 params = {
1002 1002 b'diff_id': diffid,
1003 1003 b'name': b'hg:meta',
1004 1004 b'data': templatefilters.json(
1005 1005 {
1006 1006 b'user': tipctx.user(),
1007 1007 b'date': b'%d %d' % tipctx.date(),
1008 1008 b'branch': tipctx.branch(),
1009 1009 b'node': tipctx.hex(),
1010 1010 b'parent': basectx.p1().hex(),
1011 1011 }
1012 1012 ),
1013 1013 }
1014 1014 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1015 1015
1016 1016 commits = {}
1017 1017 for ctx in ctxs:
1018 1018 commits[ctx.hex()] = {
1019 1019 b'author': stringutil.person(ctx.user()),
1020 1020 b'authorEmail': stringutil.email(ctx.user()),
1021 1021 b'time': int(ctx.date()[0]),
1022 1022 b'commit': ctx.hex(),
1023 1023 b'parents': [ctx.p1().hex()],
1024 1024 b'branch': ctx.branch(),
1025 1025 }
1026 1026 params = {
1027 1027 b'diff_id': diffid,
1028 1028 b'name': b'local:commits',
1029 1029 b'data': templatefilters.json(commits),
1030 1030 }
1031 1031 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
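# For illustration, the b'hg:meta' property written above is a small JSON object
# of the form {"user": ..., "date": "<unixtime> <tz>", "branch": ..., "node":
# <tip hex>, "parent": <base parent hex>}, while b'local:commits' maps each
# commit hex in ``ctxs`` to its author, authorEmail, time, commit, parents and
# branch, exactly as built in the params dicts above.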
1032 1032
1033 1033
1034 1034 def createdifferentialrevision(
1035 1035 ctxs,
1036 1036 revid=None,
1037 1037 parentrevphid=None,
1038 1038 oldbasenode=None,
1039 1039 oldnode=None,
1040 1040 olddiff=None,
1041 1041 actions=None,
1042 1042 comment=None,
1043 1043 ):
1044 1044 """create or update a Differential Revision
1045 1045
1046 1046 If revid is None, create a new Differential Revision, otherwise update
1047 1047 revid. If parentrevphid is not None, set it as a dependency.
1048 1048
1049 1049 If there is a single commit for the new Differential Revision, ``ctxs`` will
1050 1050 be a list of that single context. Otherwise, it is a list that covers the
1051 1051 range of changes for the differential, where ``ctxs[0]`` is the first change
1052 1052 to include and ``ctxs[-1]`` is the last.
1053 1053
1054 1054 If oldnode is not None, check if the patch content (without commit message
1055 1055 and metadata) has changed before creating another diff. For a Revision with
1056 1056 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1057 1057 Revision covering multiple commits, ``oldbasenode`` corresponds to
1058 1058 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1059 1059 corresponds to ``ctxs[-1]``.
1060 1060
1061 1061 If actions is not None, they will be appended to the transaction.
1062 1062 """
1063 1063 ctx = ctxs[-1]
1064 1064 basectx = ctxs[0]
1065 1065
1066 1066 repo = ctx.repo()
1067 1067 if oldnode:
1068 1068 diffopts = mdiff.diffopts(git=True, context=32767)
1069 1069 unfi = repo.unfiltered()
1070 1070 oldctx = unfi[oldnode]
1071 1071 oldbasectx = unfi[oldbasenode]
1072 1072 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1073 1073 oldbasectx, oldctx, diffopts
1074 1074 )
1075 1075 else:
1076 1076 neednewdiff = True
1077 1077
1078 1078 transactions = []
1079 1079 if neednewdiff:
1080 1080 diff = creatediff(basectx, ctx)
1081 1081 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1082 1082 if comment:
1083 1083 transactions.append({b'type': b'comment', b'value': comment})
1084 1084 else:
1085 1085 # Even if we don't need to upload a new diff because the patch content
1086 1086 # did not change, we might still need to update its metadata so
1087 1087 # pushers can know the correct node metadata.
1088 1088 assert olddiff
1089 1089 diff = olddiff
1090 1090 writediffproperties(ctxs, diff)
1091 1091
1092 1092 # Set the parent Revision every time, so commit re-ordering is picked up
1093 1093 if parentrevphid:
1094 1094 transactions.append(
1095 1095 {b'type': b'parents.set', b'value': [parentrevphid]}
1096 1096 )
1097 1097
1098 1098 if actions:
1099 1099 transactions += actions
1100 1100
1101 1101 # When folding multiple local commits into a single review, arcanist will
1102 1102 # take the summary line of the first commit as the title, and then
1103 1103 # concatenate the rest of the remaining messages (including each of their
1104 1104 # first lines) to the rest of the first commit message (each separated by
1105 1105 # an empty line), and use that as the summary field. Do the same here.
1106 1106 # For commits with only a one-line message, there is no summary field, as
1107 1107 # this gets assigned to the title.
1108 1108 fields = util.sortdict() # sorted for stable wire protocol in tests
1109 1109
1110 1110 for i, _ctx in enumerate(ctxs):
1111 1111 # Parse commit message and update related fields.
1112 1112 desc = _ctx.description()
1113 1113 info = callconduit(
1114 1114 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1115 1115 )
1116 1116
1117 1117 for k in [b'title', b'summary', b'testPlan']:
1118 1118 v = info[b'fields'].get(k)
1119 1119 if not v:
1120 1120 continue
1121 1121
1122 1122 if i == 0:
1123 1123 # Title, summary and test plan (if present) are taken verbatim
1124 1124 # for the first commit.
1125 1125 fields[k] = v.rstrip()
1126 1126 continue
1127 1127 elif k == b'title':
1128 1128 # Add subsequent titles (i.e. the first line of the commit
1129 1129 # message) back to the summary.
1130 1130 k = b'summary'
1131 1131
1132 1132 # Append any current field to the existing composite field
1133 1133 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1134 1134
1135 1135 for k, v in fields.items():
1136 1136 transactions.append({b'type': k, b'value': v})
1137 1137
1138 1138 params = {b'transactions': transactions}
1139 1139 if revid is not None:
1140 1140 # Update an existing Differential Revision
1141 1141 params[b'objectIdentifier'] = revid
1142 1142
1143 1143 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1144 1144 if not revision:
1145 1145 if len(ctxs) == 1:
1146 1146 msg = _(b'cannot create revision for %s') % ctx
1147 1147 else:
1148 1148 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1149 1149 raise error.Abort(msg)
1150 1150
1151 1151 return revision, diff
1152 1152
1153 1153
1154 1154 def userphids(ui, names):
1155 1155 """convert user names to PHIDs"""
1156 1156 names = [name.lower() for name in names]
1157 1157 query = {b'constraints': {b'usernames': names}}
1158 1158 result = callconduit(ui, b'user.search', query)
1159 1159 # A username that is not found is not an error of the API, so check if we
1160 1160 # have missed some names here.
1161 1161 data = result[b'data']
1162 1162 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1163 1163 unresolved = set(names) - resolved
1164 1164 if unresolved:
1165 1165 raise error.Abort(
1166 1166 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1167 1167 )
1168 1168 return [entry[b'phid'] for entry in data]
1169 1169
1170 1170
1171 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1172 """update the local commit list for the ``diff`` associated with ``drevid``
1173
1174 This is a utility function for the amend phase of ``phabsend``, which
1175 converts failures to warning messages.
1176 """
1177 try:
1178 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1179 except util.urlerr.urlerror:
1180 # If it fails just warn and keep going, otherwise the DREV
1181 # associations will be lost
1182 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1183
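# For illustration, the amend phase of phabsend below calls this helper as
#   _amend_diff_properties(unfi, drevid, [newnode], diffmap[old.node()])
# after rewriting each commit, so the Differential diff keeps pointing at the
# newly created node rather than the pre-amend one.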
1184
1171 1185 @vcrcommand(
1172 1186 b'phabsend',
1173 1187 [
1174 1188 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1175 1189 (b'', b'amend', True, _(b'update commit messages')),
1176 1190 (b'', b'reviewer', [], _(b'specify reviewers')),
1177 1191 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1178 1192 (
1179 1193 b'm',
1180 1194 b'comment',
1181 1195 b'',
1182 1196 _(b'add a comment to Revisions with new/updated Diffs'),
1183 1197 ),
1184 1198 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1185 1199 ],
1186 1200 _(b'REV [OPTIONS]'),
1187 1201 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1188 1202 )
1189 1203 def phabsend(ui, repo, *revs, **opts):
1190 1204 """upload changesets to Phabricator
1191 1205
1192 1206 If there are multiple revisions specified, they will be sent as a stack
1193 1207 with a linear dependency relationship, using the order specified by the
1194 1208 revset.
1195 1209
1196 1210 When changesets are uploaded for the first time, local tags will be created
1197 1211 to maintain the association. After the first time, phabsend will check the
1198 1212 obsstore and tag information so it can figure out whether to update an
1199 1213 existing Differential Revision, or create a new one.
1200 1214
1201 1215 If --amend is set, update commit messages so they have the
1202 1216 ``Differential Revision`` URL, and remove the related tags. This is similar
1203 1217 to what arcanist does, and is preferred in author-push workflows. Otherwise,
1204 1218 local tags are used to record the ``Differential Revision`` association.
1205 1219
1206 1220 The --confirm option lets you confirm changesets before sending them. You
1207 1221 can also add the following to your configuration file to make it the default
1208 1222 behaviour::
1209 1223
1210 1224 [phabsend]
1211 1225 confirm = true
1212 1226
1213 1227 phabsend will check obsstore and the above association to decide whether to
1214 1228 update an existing Differential Revision, or create a new one.
1215 1229 """
1216 1230 opts = pycompat.byteskwargs(opts)
1217 1231 revs = list(revs) + opts.get(b'rev', [])
1218 1232 revs = scmutil.revrange(repo, revs)
1219 1233 revs.sort() # ascending order to preserve topological parent/child in phab
1220 1234
1221 1235 if not revs:
1222 1236 raise error.Abort(_(b'phabsend requires at least one changeset'))
1223 1237 if opts.get(b'amend'):
1224 1238 cmdutil.checkunfinished(repo)
1225 1239
1226 1240 # {newnode: (oldnode, olddiff, olddrev)}
1227 1241 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1228 1242
1229 1243 confirm = ui.configbool(b'phabsend', b'confirm')
1230 1244 confirm |= bool(opts.get(b'confirm'))
1231 1245 if confirm:
1232 1246 confirmed = _confirmbeforesend(repo, revs, oldmap)
1233 1247 if not confirmed:
1234 1248 raise error.Abort(_(b'phabsend cancelled'))
1235 1249
1236 1250 actions = []
1237 1251 reviewers = opts.get(b'reviewer', [])
1238 1252 blockers = opts.get(b'blocker', [])
1239 1253 phids = []
1240 1254 if reviewers:
1241 1255 phids.extend(userphids(repo.ui, reviewers))
1242 1256 if blockers:
1243 1257 phids.extend(
1244 1258 map(
1245 1259 lambda phid: b'blocking(%s)' % phid,
1246 1260 userphids(repo.ui, blockers),
1247 1261 )
1248 1262 )
1249 1263 if phids:
1250 1264 actions.append({b'type': b'reviewers.add', b'value': phids})
1251 1265
1252 1266 drevids = [] # [int]
1253 1267 diffmap = {} # {newnode: diff}
1254 1268
1255 1269 # Send patches one by one so we know their Differential Revision PHIDs and
1256 1270 # can provide dependency relationship
1257 1271 lastrevphid = None
1258 1272 for rev in revs:
1259 1273 ui.debug(b'sending rev %d\n' % rev)
1260 1274 ctx = repo[rev]
1261 1275
1262 1276 # Get Differential Revision ID
1263 1277 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1264 1278 oldbasenode = oldnode
1265 1279 if oldnode != ctx.node() or opts.get(b'amend'):
1266 1280 # Create or update Differential Revision
1267 1281 revision, diff = createdifferentialrevision(
1268 1282 [ctx],
1269 1283 revid,
1270 1284 lastrevphid,
1271 1285 oldbasenode,
1272 1286 oldnode,
1273 1287 olddiff,
1274 1288 actions,
1275 1289 opts.get(b'comment'),
1276 1290 )
1277 1291 diffmap[ctx.node()] = diff
1278 1292 newrevid = int(revision[b'object'][b'id'])
1279 1293 newrevphid = revision[b'object'][b'phid']
1280 1294 if revid:
1281 1295 action = b'updated'
1282 1296 else:
1283 1297 action = b'created'
1284 1298
1285 1299 # Create a local tag to note the association, if the commit message
1286 1300 # does not have it already
1287 1301 m = _differentialrevisiondescre.search(ctx.description())
1288 1302 if not m or int(m.group('id')) != newrevid:
1289 1303 tagname = b'D%d' % newrevid
1290 1304 tags.tag(
1291 1305 repo,
1292 1306 tagname,
1293 1307 ctx.node(),
1294 1308 message=None,
1295 1309 user=None,
1296 1310 date=None,
1297 1311 local=True,
1298 1312 )
1299 1313 else:
1300 1314 # Nothing changed. But still set "newrevphid" so the next revision
1301 1315 # can depend on this one, and "newrevid" for the summary line.
1302 1316 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1303 1317 newrevid = revid
1304 1318 action = b'skipped'
1305 1319
1306 1320 actiondesc = ui.label(
1307 1321 {
1308 1322 b'created': _(b'created'),
1309 1323 b'skipped': _(b'skipped'),
1310 1324 b'updated': _(b'updated'),
1311 1325 }[action],
1312 1326 b'phabricator.action.%s' % action,
1313 1327 )
1314 1328 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1315 1329 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1316 1330 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1317 1331 ui.write(
1318 1332 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1319 1333 )
1320 1334 drevids.append(newrevid)
1321 1335 lastrevphid = newrevphid
1322 1336
1323 1337 # Update commit messages and remove tags
1324 1338 if opts.get(b'amend'):
1325 1339 unfi = repo.unfiltered()
1326 1340 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1327 1341 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1328 1342 wnode = unfi[b'.'].node()
1329 1343 mapping = {} # {oldnode: [newnode]}
1330 1344 for i, rev in enumerate(revs):
1331 1345 old = unfi[rev]
1332 1346 drevid = drevids[i]
1333 1347 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1334 1348 newdesc = get_amended_desc(drev, old, False)
1335 1349 # Make sure the commit message contains "Differential Revision"
1336 1350 if old.description() != newdesc:
1337 1351 if old.phase() == phases.public:
1338 1352 ui.warn(
1339 1353 _(b"warning: not updating public commit %s\n")
1340 1354 % scmutil.formatchangeid(old)
1341 1355 )
1342 1356 continue
1343 1357 parents = [
1344 1358 mapping.get(old.p1().node(), (old.p1(),))[0],
1345 1359 mapping.get(old.p2().node(), (old.p2(),))[0],
1346 1360 ]
1347 1361 new = context.metadataonlyctx(
1348 1362 repo,
1349 1363 old,
1350 1364 parents=parents,
1351 1365 text=newdesc,
1352 1366 user=old.user(),
1353 1367 date=old.date(),
1354 1368 extra=old.extra(),
1355 1369 )
1356 1370
1357 1371 newnode = new.commit()
1358 1372
1359 1373 mapping[old.node()] = [newnode]
1360 # Update diff property
1361 # If it fails just warn and keep going, otherwise the DREV
1362 # associations will be lost
1363 try:
1364 writediffproperties(
1365 [unfi[newnode]], diffmap[old.node()]
1366 )
1367 except util.urlerr.urlerror:
1368 ui.warnnoi18n(
1369 b'Failed to update metadata for D%d\n' % drevid
1370 )
1374
1375 _amend_diff_properties(
1376 unfi, drevid, [newnode], diffmap[old.node()]
1377 )
1371 1378 # Remove the local tag since it's no longer necessary
1372 1379 tagname = b'D%d' % drevid
1373 1380 if tagname in repo.tags():
1374 1381 tags.tag(
1375 1382 repo,
1376 1383 tagname,
1377 1384 nullid,
1378 1385 message=None,
1379 1386 user=None,
1380 1387 date=None,
1381 1388 local=True,
1382 1389 )
1383 1390 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1384 1391 if wnode in mapping:
1385 1392 unfi.setparents(mapping[wnode][0])
1386 1393
1387 1394
1388 1395 # Map from "hg:meta" keys to header understood by "hg import". The order is
1389 1396 # consistent with "hg export" output.
1390 1397 _metanamemap = util.sortdict(
1391 1398 [
1392 1399 (b'user', b'User'),
1393 1400 (b'date', b'Date'),
1394 1401 (b'branch', b'Branch'),
1395 1402 (b'node', b'Node ID'),
1396 1403 (b'parent', b'Parent '),
1397 1404 ]
1398 1405 )
1399 1406
1400 1407
1401 1408 def _confirmbeforesend(repo, revs, oldmap):
1402 1409 url, token = readurltoken(repo.ui)
1403 1410 ui = repo.ui
1404 1411 for rev in revs:
1405 1412 ctx = repo[rev]
1406 1413 desc = ctx.description().splitlines()[0]
1407 1414 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1408 1415 if drevid:
1409 1416 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1410 1417 else:
1411 1418 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1412 1419
1413 1420 ui.write(
1414 1421 _(b'%s - %s: %s\n')
1415 1422 % (
1416 1423 drevdesc,
1417 1424 ui.label(bytes(ctx), b'phabricator.node'),
1418 1425 ui.label(desc, b'phabricator.desc'),
1419 1426 )
1420 1427 )
1421 1428
1422 1429 if ui.promptchoice(
1423 1430 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1424 1431 ):
1425 1432 return False
1426 1433
1427 1434 return True
1428 1435
1429 1436
1430 1437 _knownstatusnames = {
1431 1438 b'accepted',
1432 1439 b'needsreview',
1433 1440 b'needsrevision',
1434 1441 b'closed',
1435 1442 b'abandoned',
1436 1443 b'changesplanned',
1437 1444 }
1438 1445
1439 1446
1440 1447 def _getstatusname(drev):
1441 1448 """get normalized status name from a Differential Revision"""
1442 1449 return drev[b'statusName'].replace(b' ', b'').lower()
1443 1450
1444 1451
1445 1452 # Small language to specify differential revisions. Supported symbols: (), :X,
1446 1453 # +, -, and &.
1447 1454
1448 1455 _elements = {
1449 1456 # token-type: binding-strength, primary, prefix, infix, suffix
1450 1457 b'(': (12, None, (b'group', 1, b')'), None, None),
1451 1458 b':': (8, None, (b'ancestors', 8), None, None),
1452 1459 b'&': (5, None, None, (b'and_', 5), None),
1453 1460 b'+': (4, None, None, (b'add', 4), None),
1454 1461 b'-': (4, None, None, (b'sub', 4), None),
1455 1462 b')': (0, None, None, None, None),
1456 1463 b'symbol': (0, b'symbol', None, None, None),
1457 1464 b'end': (0, None, None, None, None),
1458 1465 }
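# For illustration (operator names as defined above, semantics sketched): a spec
# such as b'D1+D2' combines two revisions via the b'add' operator, while b':D3'
# uses the b'ancestors' prefix to pull in D3 together with the stack it depends
# on; the full language is documented in the phabread docstring.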
1459 1466
1460 1467
1461 1468 def _tokenize(text):
1462 1469 view = memoryview(text) # zero-copy slice
1463 1470 special = b'():+-& '
1464 1471 pos = 0
1465 1472 length = len(text)
1466 1473 while pos < length:
1467 1474 symbol = b''.join(
1468 1475 itertools.takewhile(
1469 1476 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1470 1477 )
1471 1478 )
1472 1479 if symbol:
1473 1480 yield (b'symbol', symbol, pos)
1474 1481 pos += len(symbol)
1475 1482 else: # special char, ignore space
1476 1483 if text[pos : pos + 1] != b' ':
1477 1484 yield (text[pos : pos + 1], None, pos)
1478 1485 pos += 1
1479 1486 yield (b'end', None, pos)
1480 1487
1481 1488
1482 1489 def _parse(text):
1483 1490 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1484 1491 if pos != len(text):
1485 1492 raise error.ParseError(b'invalid token', pos)
1486 1493 return tree
1487 1494
1488 1495
1489 1496 def _parsedrev(symbol):
1490 1497 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1491 1498 if symbol.startswith(b'D') and symbol[1:].isdigit():
1492 1499 return int(symbol[1:])
1493 1500 if symbol.isdigit():
1494 1501 return int(symbol)
1495 1502
1496 1503
1497 1504 def _prefetchdrevs(tree):
1498 1505 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1499 1506 drevs = set()
1500 1507 ancestordrevs = set()
1501 1508 op = tree[0]
1502 1509 if op == b'symbol':
1503 1510 r = _parsedrev(tree[1])
1504 1511 if r:
1505 1512 drevs.add(r)
1506 1513 elif op == b'ancestors':
1507 1514 r, a = _prefetchdrevs(tree[1])
1508 1515 drevs.update(r)
1509 1516 ancestordrevs.update(r)
1510 1517 ancestordrevs.update(a)
1511 1518 else:
1512 1519 for t in tree[1:]:
1513 1520 r, a = _prefetchdrevs(t)
1514 1521 drevs.update(r)
1515 1522 ancestordrevs.update(a)
1516 1523 return drevs, ancestordrevs
1517 1524
1518 1525
1519 1526 def querydrev(ui, spec):
1520 1527 """return a list of "Differential Revision" dicts
1521 1528
1522 1529 spec is a string using a simple query language, see docstring in phabread
1523 1530 for details.
1524 1531
1525 1532 A "Differential Revision dict" looks like:
1526 1533
1527 1534 {
1528 1535 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1529 1536 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1530 1537 "auxiliary": {
1531 1538 "phabricator:depends-on": [
1532 1539 "PHID-DREV-gbapp366kutjebt7agcd"
1533 1540 ]
1534 1541 "phabricator:projects": [],
1535 1542 },
1536 1543 "branch": "default",
1537 1544 "ccs": [],
1538 1545 "commits": [],
1539 1546 "dateCreated": "1499181406",
1540 1547 "dateModified": "1499182103",
1541 1548 "diffs": [
1542 1549 "3",
1543 1550 "4",
1544 1551 ],
1545 1552 "hashes": [],
1546 1553 "id": "2",
1547 1554 "lineCount": "2",
1548 1555 "phid": "PHID-DREV-672qvysjcczopag46qty",
1549 1556 "properties": {},
1550 1557 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1551 1558 "reviewers": [],
1552 1559 "sourcePath": null
1553 1560 "status": "0",
1554 1561 "statusName": "Needs Review",
1555 1562 "summary": "",
1556 1563 "testPlan": "",
1557 1564 "title": "example",
1558 1565 "uri": "https://phab.example.com/D2",
1559 1566 }
1560 1567 """
1561 1568 # TODO: replace differential.query and differential.querydiffs with
1562 1569 # differential.diff.search because the former (and their output) are
1563 1570 # frozen, and planned to be deprecated and removed.
1564 1571
1565 1572 def fetch(params):
1566 1573 """params -> single drev or None"""
1567 1574 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1568 1575 if key in prefetched:
1569 1576 return prefetched[key]
1570 1577 drevs = callconduit(ui, b'differential.query', params)
1571 1578 # Fill prefetched with the result
1572 1579 for drev in drevs:
1573 1580 prefetched[drev[b'phid']] = drev
1574 1581 prefetched[int(drev[b'id'])] = drev
1575 1582 if key not in prefetched:
1576 1583 raise error.Abort(
1577 1584 _(b'cannot get Differential Revision %r') % params
1578 1585 )
1579 1586 return prefetched[key]
1580 1587
1581 1588 def getstack(topdrevids):
1582 1589 """given a top, get a stack from the bottom, [id] -> [id]"""
1583 1590 visited = set()
1584 1591 result = []
1585 1592 queue = [{b'ids': [i]} for i in topdrevids]
1586 1593 while queue:
1587 1594 params = queue.pop()
1588 1595 drev = fetch(params)
1589 1596 if drev[b'id'] in visited:
1590 1597 continue
1591 1598 visited.add(drev[b'id'])
1592 1599 result.append(int(drev[b'id']))
1593 1600 auxiliary = drev.get(b'auxiliary', {})
1594 1601 depends = auxiliary.get(b'phabricator:depends-on', [])
1595 1602 for phid in depends:
1596 1603 queue.append({b'phids': [phid]})
1597 1604 result.reverse()
1598 1605 return smartset.baseset(result)
1599 1606
1600 1607 # Initialize prefetch cache
1601 1608 prefetched = {} # {id or phid: drev}
1602 1609
1603 1610 tree = _parse(spec)
1604 1611 drevs, ancestordrevs = _prefetchdrevs(tree)
1605 1612
1606 1613 # developer config: phabricator.batchsize
1607 1614 batchsize = ui.configint(b'phabricator', b'batchsize')
1608 1615
1609 1616 # Prefetch Differential Revisions in batch
1610 1617 tofetch = set(drevs)
1611 1618 for r in ancestordrevs:
1612 1619 tofetch.update(range(max(1, r - batchsize), r + 1))
1613 1620 if drevs:
1614 1621 fetch({b'ids': list(tofetch)})
1615 1622 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1616 1623
1617 1624 # Walk through the tree, return smartsets
1618 1625 def walk(tree):
1619 1626 op = tree[0]
1620 1627 if op == b'symbol':
1621 1628 drev = _parsedrev(tree[1])
1622 1629 if drev:
1623 1630 return smartset.baseset([drev])
1624 1631 elif tree[1] in _knownstatusnames:
1625 1632 drevs = [
1626 1633 r
1627 1634 for r in validids
1628 1635 if _getstatusname(prefetched[r]) == tree[1]
1629 1636 ]
1630 1637 return smartset.baseset(drevs)
1631 1638 else:
1632 1639 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1633 1640 elif op in {b'and_', b'add', b'sub'}:
1634 1641 assert len(tree) == 3
1635 1642 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1636 1643 elif op == b'group':
1637 1644 return walk(tree[1])
1638 1645 elif op == b'ancestors':
1639 1646 return getstack(walk(tree[1]))
1640 1647 else:
1641 1648 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1642 1649
1643 1650 return [prefetched[r] for r in walk(tree)]
1644 1651
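# Illustrative sketch, not part of the original module: a typical call using
# the query language documented in the phabread docstring further below.  The
# spec here is hypothetical.
#
#   drevs = querydrev(ui, b':D6+8-(2+D4)')
#   # drevs is a list of "Differential Revision" dicts (see the docstring
#   # above) covering the stack up to D6 plus D8, with D2 and D4 excluded.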
1645 1652
1646 1653 def getdescfromdrev(drev):
1647 1654 """get description (commit message) from "Differential Revision"
1648 1655
1649 1656 This is similar to differential.getcommitmessage API. But we only care
1650 1657 about limited fields: title, summary, test plan, and URL.
1651 1658 """
1652 1659 title = drev[b'title']
1653 1660 summary = drev[b'summary'].rstrip()
1654 1661 testplan = drev[b'testPlan'].rstrip()
1655 1662 if testplan:
1656 1663 testplan = b'Test Plan:\n%s' % testplan
1657 1664 uri = b'Differential Revision: %s' % drev[b'uri']
1658 1665 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1659 1666
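# Illustrative sketch, not part of the original module: sample input and
# output for getdescfromdrev().  All field values are made up.
#
#   drev = {
#       b'title': b'example',
#       b'summary': b'fix a thing',
#       b'testPlan': b'ran the test suite',
#       b'uri': b'https://phab.example.com/D2',
#   }
#   getdescfromdrev(drev)
#   # -> b'example\n\nfix a thing\n\nTest Plan:\nran the test suite\n\n'
#   #    b'Differential Revision: https://phab.example.com/D2'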
1660 1667
1661 1668 def get_amended_desc(drev, ctx, folded):
1662 1669 """similar to ``getdescfromdrev``, but supports a folded series of commits
1663 1670
1664 1671 This is used when determining if an individual commit needs to have its
1665 1672 message amended after posting it for review. The determination is made for
1666 1673 each individual commit, even when they were folded into one review.
1667 1674 """
1668 1675 if not folded:
1669 1676 return getdescfromdrev(drev)
1670 1677
1671 1678 uri = b'Differential Revision: %s' % drev[b'uri']
1672 1679
1673 1680 # Since the commit messages were combined when posting multiple commits
1674 1681 # with --fold, the fields can't be read from Phabricator here, or *all*
1675 1682 # affected local revisions will end up with the same commit message after
1676 1683 # the URI is amended in. Append in the DREV line, or update it if it
1677 1684 # exists. At worst, this means commit message or test plan updates on
1678 1685 # Phabricator aren't propagated back to the repository, but that seems
1679 1686 # reasonable for the case where local commits are effectively combined
1680 1687 # in Phabricator.
1681 1688 m = _differentialrevisiondescre.search(ctx.description())
1682 1689 if not m:
1683 1690 return b'\n\n'.join([ctx.description(), uri])
1684 1691
1685 1692 return _differentialrevisiondescre.sub(uri, ctx.description())
1686 1693
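# Illustrative sketch, not part of the original module: behaviour of
# get_amended_desc() when ``folded`` is True, for two hypothetical local
# commit messages (<uri> standing for drev[b'uri']):
#
#   b'commit one'
#       -> b'commit one\n\nDifferential Revision: <uri>'   (URI appended)
#   b'commit two\n\nDifferential Revision: https://phab.example.com/D1'
#       -> only the existing DREV line is rewritten to <uri>; the rest of
#          the local message is left untouched.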
1687 1694
1688 1695 def getlocalcommits(diff):
1689 1696 """get the set of local commits from a diff object
1690 1697
1691 1698 See ``getdiffmeta()`` for an example diff object.
1692 1699 """
1693 1700 props = diff.get(b'properties') or {}
1694 1701 commits = props.get(b'local:commits') or {}
1695 1702 if len(commits) > 1:
1696 1703 return {bin(c) for c in commits.keys()}
1697 1704
1698 1705 # Storing the diff metadata predates storing `local:commits`, so continue
1699 1706 # to use that in the --no-fold case.
1700 1707 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1701 1708
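# Illustrative sketch, not part of the original module: sample input for
# getlocalcommits().  The node hashes are borrowed from the getdiffmeta()
# docstring below and are purely illustrative.
#
#   diff = {b'properties': {b'local:commits': {
#       b'98c08acae292b2faf60a279b4189beb6cff1414d': {...},
#       b'6d0abad76b30e4724a37ab8721d630394070fe16': {...},
#   }}}
#   getlocalcommits(diff)   # -> {bin(node) for both nodes} (--fold case)
#
# With zero or one entry, the node recorded in the diff metadata returned by
# getdiffmeta() is used instead.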
1702 1709
1703 1710 def getdiffmeta(diff):
1704 1711 """get commit metadata (date, node, user, p1) from a diff object
1705 1712
1706 1713 The metadata could be "hg:meta", sent by phabsend, like:
1707 1714
1708 1715 "properties": {
1709 1716 "hg:meta": {
1710 1717 "branch": "default",
1711 1718 "date": "1499571514 25200",
1712 1719 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1713 1720 "user": "Foo Bar <foo@example.com>",
1714 1721 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1715 1722 }
1716 1723 }
1717 1724
1718 1725 Or converted from "local:commits", sent by "arc", like:
1719 1726
1720 1727 "properties": {
1721 1728 "local:commits": {
1722 1729 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1723 1730 "author": "Foo Bar",
1724 1731             "authorEmail": "foo@example.com",
1725 1732 "branch": "default",
1726 1733 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1727 1734 "local": "1000",
1728 1735 "message": "...",
1729 1736 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1730 1737 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1731 1738 "summary": "...",
1732 1739 "tag": "",
1733 1740 "time": 1499546314,
1734 1741 }
1735 1742 }
1736 1743 }
1737 1744
1738 1745 Note: metadata extracted from "local:commits" will lose time zone
1739 1746 information.
1740 1747 """
1741 1748 props = diff.get(b'properties') or {}
1742 1749 meta = props.get(b'hg:meta')
1743 1750 if not meta:
1744 1751 if props.get(b'local:commits'):
1745 1752 commit = sorted(props[b'local:commits'].values())[0]
1746 1753 meta = {}
1747 1754 if b'author' in commit and b'authorEmail' in commit:
1748 1755 meta[b'user'] = b'%s <%s>' % (
1749 1756 commit[b'author'],
1750 1757 commit[b'authorEmail'],
1751 1758 )
1752 1759 if b'time' in commit:
1753 1760 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1754 1761 if b'branch' in commit:
1755 1762 meta[b'branch'] = commit[b'branch']
1756 1763 node = commit.get(b'commit', commit.get(b'rev'))
1757 1764 if node:
1758 1765 meta[b'node'] = node
1759 1766 if len(commit.get(b'parents', ())) >= 1:
1760 1767 meta[b'parent'] = commit[b'parents'][0]
1761 1768 else:
1762 1769 meta = {}
1763 1770 if b'date' not in meta and b'dateCreated' in diff:
1764 1771 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1765 1772 if b'branch' not in meta and diff.get(b'branch'):
1766 1773 meta[b'branch'] = diff[b'branch']
1767 1774 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1768 1775 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1769 1776 return meta
1770 1777
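# Illustrative sketch, not part of the original module: extracting metadata
# from a diff carrying the "hg:meta" property shown in the docstring above.
#
#   diff = {
#       b'properties': {b'hg:meta': {
#           b'branch': b'default',
#           b'date': b'1499571514 25200',
#           b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
#           b'user': b'Foo Bar <foo@example.com>',
#           b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
#       }},
#   }
#   getdiffmeta(diff)   # -> the "hg:meta" dict itself, used verbatim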
1771 1778
1772 1779 def _getdrevs(ui, stack, specs):
1773 1780 """convert user supplied DREVSPECs into "Differential Revision" dicts
1774 1781
1775 1782 See ``hg help phabread`` for how to specify each DREVSPEC.
1776 1783 """
1777 1784 if len(specs) > 0:
1778 1785
1779 1786 def _formatspec(s):
1780 1787 if stack:
1781 1788 s = b':(%s)' % s
1782 1789 return b'(%s)' % s
1783 1790
1784 1791 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1785 1792
1786 1793 drevs = querydrev(ui, spec)
1787 1794 if drevs:
1788 1795 return drevs
1789 1796
1790 1797 raise error.Abort(_(b"empty DREVSPEC set"))
1791 1798
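# Illustrative sketch, not part of the original module: how user-supplied
# specs are combined into a single query string before calling querydrev().
#
#   specs = [b'D6', b'8']
#   # without --stack: spec == b'(D6)+(8)'
#   # with --stack:    spec == b'(:(D6))+(:(8))'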
1792 1799
1793 1800 def readpatch(ui, drevs, write):
1794 1801 """generate plain-text patch readable by 'hg import'
1795 1802
1796 1803 write takes a list of (DREV, bytes), where DREV is the differential number
1797 1804 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1798 1805 to be imported. drevs is what "querydrev" returns, results of
1799 1806 "differential.query".
1800 1807 """
1801 1808 # Prefetch hg:meta property for all diffs
1802 1809 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1803 1810 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1804 1811
1805 1812 patches = []
1806 1813
1807 1814 # Generate patch for each drev
1808 1815 for drev in drevs:
1809 1816 ui.note(_(b'reading D%s\n') % drev[b'id'])
1810 1817
1811 1818 diffid = max(int(v) for v in drev[b'diffs'])
1812 1819 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1813 1820 desc = getdescfromdrev(drev)
1814 1821 header = b'# HG changeset patch\n'
1815 1822
1816 1823 # Try to preserve metadata from hg:meta property. Write hg patch
1817 1824 # headers that can be read by the "import" command. See patchheadermap
1818 1825 # and extract in mercurial/patch.py for supported headers.
1819 1826 meta = getdiffmeta(diffs[b'%d' % diffid])
1820 1827 for k in _metanamemap.keys():
1821 1828 if k in meta:
1822 1829 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1823 1830
1824 1831 content = b'%s%s\n%s' % (header, desc, body)
1825 1832 patches.append((drev[b'id'], content))
1826 1833
1827 1834 # Write patches to the supplied callback
1828 1835 write(patches)
1829 1836
1830 1837
1831 1838 @vcrcommand(
1832 1839 b'phabread',
1833 1840 [(b'', b'stack', False, _(b'read dependencies'))],
1834 1841 _(b'DREVSPEC... [OPTIONS]'),
1835 1842 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1836 1843 optionalrepo=True,
1837 1844 )
1838 1845 def phabread(ui, repo, *specs, **opts):
1839 1846 """print patches from Phabricator suitable for importing
1840 1847
1841 1848 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1842 1849 the number ``123``. It could also have common operators like ``+``, ``-``,
1843 1850 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1844 1851 select a stack. If multiple DREVSPEC values are given, the result is the
1845 1852 union of each individually evaluated value. No attempt is currently made
1846 1853 to reorder the values to run from parent to child.
1847 1854
1848 1855 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1849 1856     could be used to filter patches by status. For performance reasons, they
1850 1857     only filter within the non-status selections and cannot be used alone.
1851 1858
1852 1859     For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
1853 1860 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1854 1861 stack up to D9.
1855 1862
1856 1863     If --stack is given, follow the dependency information and read all patches.
1857 1864 It is equivalent to the ``:`` operator.
1858 1865 """
1859 1866 opts = pycompat.byteskwargs(opts)
1860 1867 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1861 1868
1862 1869 def _write(patches):
1863 1870 for drev, content in patches:
1864 1871 ui.write(content)
1865 1872
1866 1873 readpatch(ui, drevs, _write)
1867 1874
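# Illustrative usage, not part of the original module (command lines are
# examples only):
#
#   hg phabread D123                # print D123 as an importable patch
#   hg phabread --stack D123        # also read its dependencies
#   hg phabread ':D6+8-(2+D4)'      # complex query, see docstring above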
1868 1875
1869 1876 @vcrcommand(
1870 1877 b'phabimport',
1871 1878 [(b'', b'stack', False, _(b'import dependencies as well'))],
1872 1879 _(b'DREVSPEC... [OPTIONS]'),
1873 1880 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1874 1881 )
1875 1882 def phabimport(ui, repo, *specs, **opts):
1876 1883 """import patches from Phabricator for the specified Differential Revisions
1877 1884
1878 1885 The patches are read and applied starting at the parent of the working
1879 1886 directory.
1880 1887
1881 1888 See ``hg help phabread`` for how to specify DREVSPEC.
1882 1889 """
1883 1890 opts = pycompat.byteskwargs(opts)
1884 1891
1885 1892 # --bypass avoids losing exec and symlink bits when importing on Windows,
1886 1893 # and allows importing with a dirty wdir. It also aborts instead of leaving
1887 1894 # rejects.
1888 1895 opts[b'bypass'] = True
1889 1896
1890 1897 # Mandatory default values, synced with commands.import
1891 1898 opts[b'strip'] = 1
1892 1899 opts[b'prefix'] = b''
1893 1900 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1894 1901 opts[b'obsolete'] = False
1895 1902
1896 1903 if ui.configbool(b'phabimport', b'secret'):
1897 1904 opts[b'secret'] = True
1898 1905 if ui.configbool(b'phabimport', b'obsolete'):
1899 1906 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
1900 1907
1901 1908 def _write(patches):
1902 1909 parents = repo[None].parents()
1903 1910
1904 1911 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1905 1912 for drev, contents in patches:
1906 1913 ui.status(_(b'applying patch from D%s\n') % drev)
1907 1914
1908 1915 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1909 1916 msg, node, rej = cmdutil.tryimportone(
1910 1917 ui,
1911 1918 repo,
1912 1919 patchdata,
1913 1920 parents,
1914 1921 opts,
1915 1922 [],
1916 1923 None, # Never update wdir to another revision
1917 1924 )
1918 1925
1919 1926 if not node:
1920 1927 raise error.Abort(_(b'D%s: no diffs found') % drev)
1921 1928
1922 1929 ui.note(msg + b'\n')
1923 1930 parents = [repo[node]]
1924 1931
1925 1932 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1926 1933
1927 1934 readpatch(repo.ui, drevs, _write)
1928 1935
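# Illustrative usage, not part of the original module:
#
#   hg phabimport D123              # apply D123 on top of the working
#                                   # directory parent
#   hg phabimport --stack D123      # apply D123 and its dependencies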
1929 1936
1930 1937 @vcrcommand(
1931 1938 b'phabupdate',
1932 1939 [
1933 1940 (b'', b'accept', False, _(b'accept revisions')),
1934 1941 (b'', b'reject', False, _(b'reject revisions')),
1935 1942 (b'', b'abandon', False, _(b'abandon revisions')),
1936 1943 (b'', b'reclaim', False, _(b'reclaim revisions')),
1937 1944 (b'm', b'comment', b'', _(b'comment on the last revision')),
1938 1945 ],
1939 1946 _(b'DREVSPEC... [OPTIONS]'),
1940 1947 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1941 1948 optionalrepo=True,
1942 1949 )
1943 1950 def phabupdate(ui, repo, *specs, **opts):
1944 1951 """update Differential Revision in batch
1945 1952
1946 1953 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1947 1954 """
1948 1955 opts = pycompat.byteskwargs(opts)
1949 1956 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1950 1957 if len(flags) > 1:
1951 1958 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1952 1959
1953 1960 actions = []
1954 1961 for f in flags:
1955 1962 actions.append({b'type': f, b'value': True})
1956 1963
1957 1964 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1958 1965 for i, drev in enumerate(drevs):
1959 1966 if i + 1 == len(drevs) and opts.get(b'comment'):
1960 1967 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1961 1968 if actions:
1962 1969 params = {
1963 1970 b'objectIdentifier': drev[b'phid'],
1964 1971 b'transactions': actions,
1965 1972 }
1966 1973 callconduit(ui, b'differential.revision.edit', params)
1967 1974
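# Illustrative usage, not part of the original module:
#
#   hg phabupdate --accept D123 -m 'LGTM'   # accept D123 and leave a comment
#   hg phabupdate --abandon ':D9'           # abandon a whole stack up to D9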
1968 1975
1969 1976 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1970 1977 def template_review(context, mapping):
1971 1978 """:phabreview: Object describing the review for this changeset.
1972 1979 Has attributes `url` and `id`.
1973 1980 """
1974 1981 ctx = context.resource(mapping, b'ctx')
1975 1982 m = _differentialrevisiondescre.search(ctx.description())
1976 1983 if m:
1977 1984 return templateutil.hybriddict(
1978 1985 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1979 1986 )
1980 1987 else:
1981 1988 tags = ctx.repo().nodetags(ctx.node())
1982 1989 for t in tags:
1983 1990 if _differentialrevisiontagre.match(t):
1984 1991 url = ctx.repo().ui.config(b'phabricator', b'url')
1985 1992 if not url.endswith(b'/'):
1986 1993 url += b'/'
1987 1994 url += t
1988 1995
1989 1996 return templateutil.hybriddict({b'url': url, b'id': t,})
1990 1997 return None
1991 1998
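# Illustrative usage, not part of the original module: the keyword can be
# queried from log templates (assuming dict-style member access works as for
# other dict-valued keywords):
#
#   hg log -r . -T '{phabreview.url}\n'
#   hg log -r . -T '{phabreview.id}\n'    # e.g. "D123"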
1992 1999
1993 2000 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1994 2001 def template_status(context, mapping):
1995 2002 """:phabstatus: String. Status of Phabricator differential.
1996 2003 """
1997 2004 ctx = context.resource(mapping, b'ctx')
1998 2005 repo = context.resource(mapping, b'repo')
1999 2006 ui = context.resource(mapping, b'ui')
2000 2007
2001 2008 rev = ctx.rev()
2002 2009 try:
2003 2010 drevid = getdrevmap(repo, [rev])[rev]
2004 2011 except KeyError:
2005 2012 return None
2006 2013 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2007 2014 for drev in drevs:
2008 2015 if int(drev[b'id']) == drevid:
2009 2016 return templateutil.hybriddict(
2010 2017 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2011 2018 )
2012 2019 return None
2013 2020
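# Illustrative usage, not part of the original module (requires the show
# extension to be enabled, see the view registered below):
#
#   hg show phabstatus    # work-in-progress graph annotated with the status
#                         # of each associated differential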
2014 2021
2015 2022 @show.showview(b'phabstatus', csettopic=b'work')
2016 2023 def phabstatusshowview(ui, repo, displayer):
2017 2024     """Phabricator differential status"""
2018 2025 revs = repo.revs('sort(_underway(), topo)')
2019 2026 drevmap = getdrevmap(repo, revs)
2020 2027 unknownrevs, drevids, revsbydrevid = [], set(), {}
2021 2028 for rev, drevid in pycompat.iteritems(drevmap):
2022 2029 if drevid is not None:
2023 2030 drevids.add(drevid)
2024 2031 revsbydrevid.setdefault(drevid, set()).add(rev)
2025 2032 else:
2026 2033 unknownrevs.append(rev)
2027 2034
2028 2035 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2029 2036 drevsbyrev = {}
2030 2037 for drev in drevs:
2031 2038 for rev in revsbydrevid[int(drev[b'id'])]:
2032 2039 drevsbyrev[rev] = drev
2033 2040
2034 2041 def phabstatus(ctx):
2035 2042 drev = drevsbyrev[ctx.rev()]
2036 2043 status = ui.label(
2037 2044 b'%(statusName)s' % drev,
2038 2045 b'phabricator.status.%s' % _getstatusname(drev),
2039 2046 )
2040 2047 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2041 2048
2042 2049 revs -= smartset.baseset(unknownrevs)
2043 2050 revdag = graphmod.dagwalker(repo, revs)
2044 2051
2045 2052 ui.setconfig(b'experimental', b'graphshorten', True)
2046 2053 displayer._exthook = phabstatus
2047 2054 nodelen = show.longestshortest(repo, revs)
2048 2055 logcmdutil.displaygraph(
2049 2056 ui,
2050 2057 repo,
2051 2058 revdag,
2052 2059 displayer,
2053 2060 graphmod.asciiedges,
2054 2061 props={b'nodelen': nodelen},
2055 2062 )