# phabricator: teach createdifferentialrevision() to allow a folded commit range
# Matt Harbison - r45135:419fec82 default
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All commands, config items and template keywords below are registered
# through this helper.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# repo callsign on the Phabricator server (see module help above)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# external curl command to use instead of the builtin HTTP library, if set
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# Phabricator server URL
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# color/effect labels used when writing command output
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# extra flag appended to every command registered through vcrcommand() below
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162 162
163 163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True (in addition to the wrapped function's result) when a
    parseable ``.arcconfig`` was found, so the caller knows config was
    contributed.
    """
    loaded = False
    arcparams = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcparams = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )

        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory; nothing to do
        pass

    cfg = util.sortdict()

    # map .arcconfig keys onto the [phabricator] config section
    for arckey, cfgkey in [
        (b"repository.callsign", (b"phabricator", b"callsign")),
        (b"phabricator.uri", (b"phabricator", b"url")),
    ]:
        if arckey in arcparams:
            cfg[cfgkey] = arcparams[arckey]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or loaded  # Load .hg/hgrc
200 200
201 201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, adding a ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is passed at runtime, HTTP traffic is recorded
    to (or replayed from) the vcr cassette at PATH, so tests can run without
    a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # custom vcr request matcher: URI and method must match, and bodies
        # are compared parameter-by-parameter so key ordering is irrelevant
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # scrub real conduit API tokens before they are written to a cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # session cookies are noise (and potentially sensitive); drop them
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr performs dynamic imports that conflict with demandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped function's identity for help output
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280 280
281 281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # PHP's form encoding spells booleans out as strings
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # NOTE: exact-type dispatch on purpose -- subclasses are treated as
        # leaf values, matching the original dict-of-lambdas behavior
        if type(value) is list:
            pairs = [(b'%d' % i, v) for i, v in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, sub in pairs:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, sub)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
307 307
308 308
def readurltoken(ui):
    """Return the conduit ``(url, token)`` pair, aborting if either is missing.

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337 337
338 338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method (e.g. b'differential.querydiffs').  Aborts if
    the server reports an error_code in its response.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # the API token travels inside the request payload itself
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # hand the request off to an external curl process (see module help)
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # builtin HTTP client, honoring [auth] configuration
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382 382
383 383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sort_keys gives deterministic output, useful in tests
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407 407
408 408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # resolve the callsign to a PHID via the server
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None

    repophid = matches[0][b'phid']
    # remember the answer so later calls skip the round trip
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428 428
429 429
# matches a bare "D123"-style local tag name
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# matches a "Differential Revision: <url>" line in a commit description;
# the trailing "Dnnn" of the URL is captured as the revision id
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434 434
435 435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag still needs to be cross-checked
                        # against Phabricator below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: an explicit commit message association is trusted
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # the stale tag is removed by re-tagging it onto nullid
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529 529
530 530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    drevmap = {}
    for rev in revs:
        drevmap[rev] = None
        ctx = repo[rev]
        # a "Differential Revision:" line in the description wins
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drevmap[rev] = int(descmatch.group('id'))
            continue
        # otherwise fall back to a local "Dnnn" tag, if any
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                drevmap[rev] = int(tagmatch.group(1))
                break

    return drevmap
552 552
553 553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # collect the raw diff chunks; labels are irrelevant here
    pieces = [
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
        )
    ]
    return b''.join(pieces)
562 562
563 563
class DiffChangeType(object):
    # Integer codes sent to Phabricator in ``phabchange.type`` describing the
    # kind of change a file underwent (see addadded()/addmodified()/
    # addremoved() for how each is assigned).
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573 573
574 574
class DiffFileType(object):
    # Integer codes sent to Phabricator in ``phabchange.fileType`` (assigned
    # in makebinary()/addoldbinary()).
    TEXT = 1
    IMAGE = 2
    BINARY = 3
579 579
580 580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # offsets/lengths mirror the "@@ -old +new @@" header of a unified diff
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594 594
595 595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # paths this file was copied/moved to (used with the *_AWAY change types)
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    # a DiffChangeType value
    type = attr.ib(default=DiffChangeType.CHANGE)
    # a DiffFileType value
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate each ``new:*`` metadata entry under its ``old:*`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode string (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode string (e.g. b'100644')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635 635
636 636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change dict}, accumulated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Record a phabchange, keyed by the file's current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663 663
664 664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Emits the whole file as one giant-context git diff so Phabricator
    receives complete hunks.
    """
    repo = ctx.repo()
    matcher = match.exact([fname])
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, hunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in hunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # drop the leading "@@ ..." line; only the hunk body is wanted
        corpus = b''.join(lines[1:])
        # run diffstat over header + hunk to get add/del line counts
        statinput = list(header) + list(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statinput))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693 693
694 694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID obtained from an earlier file.allocate call
    for the content of ``fctx``.
    """
    ui = fctx.repo().ui
    # ask the server which byte ranges it still needs
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # the server already has this range
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720 720
721 721
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; aborts if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID reserved: send the whole file in one file.upload call
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # a PHID was reserved by allocate: upload chunk by chunk
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757 757
758 758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Content is unchanged.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
782 782
783 783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # images get their own file type so the web UI can render them
        pchange.fileType = DiffFileType.IMAGE
796 796
797 797
# Copied from mercurial/patch.py
# Maps a Mercurial file flag (b'l' symlink, b'x' executable, b'' regular)
# to the git-style octal mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800 800
801 801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # tell the user why the file is being treated as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815 815
816 816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # binary or non-UTF-8 content gets no text hunks
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829 829
830 830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # NB: short-circuit order matters; notutf8() prints a message
        anybinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if anybinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855 855
856 856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` -- files recorded as moves are taken out of it
    so a later addremoved() call won't see them.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # folded range: collect copies across the whole span
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source disappeared: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a second destination for an already-moved source
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # emit the synthesized COPY_AWAY/MOVE_AWAY records for the sources
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938 938
939 939
def creatediff(basectx, ctx):
    """create a Differential Diff

    Builds a ``phabdiff`` covering the changes from ``basectx.p1()`` up to
    ``ctx`` (a single commit when ``basectx == ctx``, a folded range
    otherwise) and uploads it via the ``differential.creatediff`` conduit
    API.

    Returns the conduit response dict; aborts if the server returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    # Status is computed across the whole range, so intermediate states of a
    # folded series collapse into one diff.
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        # Report the full range for folded requests, a single node otherwise.
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
969 969
970 970
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.

    Two properties are attached to the diff via
    ``differential.setdiffproperty``:

    - ``hg:meta``: tip-commit metadata (user/date/branch/node) plus the
      parent of the *base* commit, so the whole range can be re-applied.
    - ``local:commits``: per-commit metadata for every commit in ``ctxs``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1014 1014
1015 1015
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit response dicts.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context git diffs of old vs. new range to decide
        # whether a new diff upload is actually needed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        # Report the full range for folded requests, a single node otherwise.
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1117 1134
1118 1135
def userphids(ui, names):
    """convert user names to PHIDs

    Usernames are matched case-insensitively. Aborts when any requested
    name is unknown to the server, since ``user.search`` silently omits
    unmatched usernames rather than erroring.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1134 1151
1135 1152
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Transactions shared by every revision sent in this run (reviewers etc.)
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        # Single-commit Revisions have identical base and tip nodes.
        oldbasenode = oldnode
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite parents through ``mapping`` so descendants of
                    # already-amended commits follow their amended parents.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1349 1368
1350 1369
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# Used by readpatch() when synthesizing "# HG changeset patch" headers.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1362 1381
1363 1382
def _confirmbeforesend(repo, revs, oldmap):
    """display a summary of each changeset and prompt before sending

    Prints one line per revision, labelling it with its existing
    Differential Revision number (from ``oldmap``) or ``NEW``. Returns
    True if the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns the choice index; 0 is "Yes".
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1391 1410
1392 1411
# Normalized status names (see _getstatusname) that may be used as filter
# symbols in the DREVSPEC query language.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1401 1420
1402 1421
1403 1422 def _getstatusname(drev):
1404 1423 """get normalized status name from a Differential Revision"""
1405 1424 return drev[b'statusName'].replace(b' ', b'').lower()
1406 1425
1407 1426
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Parser table consumed by mercurial's generic Pratt parser (parser.parser).
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1422 1441
1423 1442
def _tokenize(text):
    """yield (token-type, value, position) triples for a DREVSPEC string

    Runs of non-special bytes become ``symbol`` tokens; each special
    character is its own token; spaces are skipped. Always ends with an
    ``end`` token.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # iterbytestr yields length-1 bytes on both py2 and py3, so the
        # membership test against ``special`` is portable.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1443 1462
1444 1463
def _parse(text):
    """parse a DREVSPEC string into a syntax tree

    Raises ParseError when trailing input is left unconsumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1450 1469
1451 1470
1452 1471 def _parsedrev(symbol):
1453 1472 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1454 1473 if symbol.startswith(b'D') and symbol[1:].isdigit():
1455 1474 return int(symbol[1:])
1456 1475 if symbol.isdigit():
1457 1476 return int(symbol)
1458 1477
1459 1478
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif kind == b'ancestors':
        # ``:X`` — X itself is both a direct hit and an ancestor root.
        sub, subanc = _prefetchdrevs(tree[1])
        drevs |= sub
        ancestordrevs |= sub
        ancestordrevs |= subanc
    else:
        # Binary operators and groups: merge results from all children.
        for child in tree[1:]:
            sub, subanc = _prefetchdrevs(child)
            drevs |= sub
            ancestordrevs |= subanc
    return drevs, ancestordrevs
1480 1499
1481 1500
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # Cache key is the first id/phid requested; conduit may return more.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the prefetched ancestor/drev universe.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1607 1626
1608 1627
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL. Empty
    sections are dropped; the remaining ones are joined by blank lines.
    """
    plan = drev[b'testPlan'].rstrip()
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        b'Test Plan:\n%s' % plan if plan else plan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    return b'\n\n'.join(s for s in sections if s)
1622 1641
1623 1642
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.

    For the non-folded case, the full message is rebuilt from the
    Phabricator fields. For the folded case, only the ``Differential
    Revision:`` line is added or replaced in the local description.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    m = _differentialrevisiondescre.search(ctx.description())
    if not m:
        return b'\n\n'.join([ctx.description(), uri])

    return _differentialrevisiondescre.sub(uri, ctx.description())
1649 1668
1650 1669
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.

    Missing fields are backfilled from the diff's own dateCreated, branch,
    and sourceControlBaseRevision values when available.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting dict *values* only works when there is a
            # single commit on py3 (dicts are unorderable); kept as-is to
            # preserve historical behavior.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]
    # Backfill anything still missing from diff-level fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1718 1737
1719 1738
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC. Each spec is
    parenthesized (and prefixed with ``:`` when ``stack`` is set) before
    being joined with ``+`` into a single query. Aborts when no specs were
    given or when the query matched nothing.
    """
    if specs:

        def fmt(spec):
            inner = b':(%s)' % spec if stack else spec
            return b'(%s)' % inner

        query = b'+'.join(pycompat.maplist(fmt, specs))
        drevs = querydrev(ui, query)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1739 1758
1740 1759
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs; only the latest (max id) diff
    # of each revision is used.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1777 1796
1778 1797
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # Stream each generated patch straight to the ui.
    def _emit(patches):
        for _drevid, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
1815 1834
1816 1835
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    # Optional behavior controlled by the [phabimport] config section: commit
    # as secret phase, and/or let evolve mark obsolescence (see below).
    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    # Callback handed to readpatch(): applies each patch in order inside a
    # single transaction, chaining each new node as the parent of the next.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # Next patch in the series applies on top of the node we just
                # created, not on the original wdir parent.
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1876 1895
1877 1896
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Each status flag becomes one conduit transaction, applied to every
    # selected revision.
    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # The comment, if given, is only attached to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1915 1934
1916 1935
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Prefer the Differential URL embedded in the commit message.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )

    # Otherwise fall back to a local "D123"-style tag on this node, building
    # the URL from the configured Phabricator base URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
1939 1958
1940 1959
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None

    # Query conduit for the mapped Differential Revision and pick the record
    # whose id matches (the query may be answered loosely).
    results = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    drev = next(
        (d for d in results if int(d[b'id']) == drevid), None
    )
    if drev is None:
        return None
    return templateutil.hybriddict(
        {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
    )
1961 1980
1962 1981
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition the underway revisions into those with an associated
    # Differential Revision and those without one.
    norev = []
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            norev.append(rev)
        else:
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One conduit query for all known Differential ids, then invert the
    # mapping so each local rev knows its drev record.
    drevs = callconduit(
        ui, b'differential.query', {b'ids': list(revsbydrevid)}
    )
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    # Extension hook invoked by the displayer for every changeset row.
    def _showstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Revisions without a Differential are dropped from the graph entirely.
    revs -= smartset.baseset(norev)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = _showstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now