##// END OF EJS Templates
phabricator: teach `getoldnodedrevmap()` to handle folded reviews...
Matt Harbison -
r45136:5f9c917e default
parent child Browse files
Show More
@@ -1,2022 +1,2055 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Single exthelper instance through which every command, config item and
# template keyword of this extension is registered.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels applied to the various bits of status output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered through vcrcommand();
# lets the test suite record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162 162
163 163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True if either ``.arcconfig`` or ``.hg/hgrc`` contributed
    configuration, so the caller knows the ui was modified.
    """
    loaded = False
    settings = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings; convert back to local bytes
        settings = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig present; nothing to apply.
        pass

    overrides = util.sortdict()

    if b"repository.callsign" in settings:
        overrides[(b"phabricator", b"callsign")] = settings[
            b"repository.callsign"
        ]

    if b"phabricator.uri" in settings:
        overrides[(b"phabricator", b"url")] = settings[b"phabricator.uri"]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or loaded  # Load .hg/hgrc
200 200
201 201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command that additionally accepts a ``--test-vcr`` flag.

    With ``--test-vcr PATH``, HTTP traffic made by the command is recorded
    to (or replayed from, if the file exists) the given vcr transcript,
    so tests can run without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: same URI and method, and equivalent
        # parameters.  JSON-valued parameters are compared structurally so
        # that dict key ordering does not matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact the real conduit API token before it lands in a transcript.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies from recorded responses as well.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr performs imports that do not play well with the
                # demand importer; disable it while loading the library.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/extension docs.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280 280
281 281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            value = {True: b'true', False: b'false'}[value]  # Python -> PHP form
        # Dispatch on the exact type: only plain lists and dicts recurse.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
307 307
308 308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        groupname, groupauth = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = groupauth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337 337
338 338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``).
    Raises error.Abort if the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # The API token travels inside the request parameters.
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the urlencoded body through stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library path.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382 382
383 383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sort_keys + fixed separators give deterministic output for tests
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407 407
408 408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign through the Conduit API.
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if len(matches) == 0:
        return None
    repophid = matches[0][b'phid']
    # Remember the answer for the remainder of this process.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428 428
429 429
# Matches local tags of the form "D123" that associate a node with a
# Differential Revision.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>" line in a commit message,
# capturing the full URL and the numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434 434
435 435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    # NOTE(review): the scraped source interleaved both the pre- and
    # post-change lines of this function (stray ``getnode`` lambda, duplicate
    # ``phprecset``/``oldnode`` assignments); this body is the resolved,
    # coherent new revision that handles folded reviews via ``getnodes``.
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
            if oldnode and not has_node(oldnode):
                oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529 547
530 548
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def drevfor(ctx):
        # Prefer a "Differential Revision:" line in the commit message.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        # Otherwise fall back to local tags shaped like "D123".
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: drevfor(repo[rev]) for rev in revs}
552 570
553 571
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Labels are only for terminal colouring; discard them and join the
    # raw byte chunks.
    pieces = []
    for chunk, _label in patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        pieces.append(chunk)
    return b''.join(pieces)
562 580
563 581
class DiffChangeType(object):
    """Kind of change a file underwent within a diff.

    NOTE(review): these numeric values are transmitted to Phabricator and
    presumably mirror the server-side change-type constants — do not renumber.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573 591
574 592
class DiffFileType(object):
    """Content type of a file within a diff.

    NOTE(review): values are transmitted to Phabricator — do not renumber.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579 597
580 598
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names marked "camelcase-required" must keep their non-PEP8
    spelling: they are serialized as-is into the Conduit payload.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594 612
595 613
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names marked "camelcase-required" must keep their non-PEP8
    spelling: they are serialized as-is into the Conduit payload.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Duplicate every "new:*" metadata entry under the matching "old:*"
        # key (used when old and new file contents are identical).
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the pre-change unix file mode.
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record the post-change unix file mode.
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635 653
636 654
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Attribute names marked "camelcase-required" must keep their non-PEP8
    spelling: they are serialized as-is into the Conduit payload.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        # Changes are keyed by path; a later change for the same path
        # replaces an earlier one.
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663 681
664 682
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Computes the git-style hunks for ``fname`` between ``basectx.p1()`` and
    ``ctx`` and appends them (with add/del line counts) to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Very large context — presumably so the whole file surrounds each hunk;
    # TODO(review): confirm intent.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Skip the "@@ ... @@" line; the offsets/lengths carry that info.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693 711
694 712
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # Ask the server which byte ranges it still needs for this file PHID.
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # Server already has this byte range.
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720 738
721 739
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID on success; raises error.Abort if no PHID could
    be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID allocated: send the whole file in a single request.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # A PHID was allocated up front; upload the missing chunks.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757 775
758 776
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
782 800
783 801
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guess:
        mime = pycompat.bytestr(guess)
        pchange.metadata[b'new:file:mime-type'] = mime
        # Images get their own type so the web UI can render them.
        if mime.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
796 814
797 815
# Copied from mercurial/patch.py
# Maps a filectx flag to the git-style mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800 818
801 819
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Tell the user why this file will be treated as binary.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
815 833
816 834
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Only text files get hunk content; binary / non-UTF-8 deletions
        # carry no corpus.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
829 847
830 848
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # Treat the file as binary if either side is binary or not valid
        # UTF-8.  Note: notutf8() prints a user-facing message as a side
        # effect, so the short-circuit order here is user-visible.
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855 873
856 874
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` — a rename source found in ``removed`` is
    popped from it so addremoved() won't emit it again.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Work out where this file came from, if anywhere.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source file is gone: record this as a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the COPY_AWAY/MOVE_AWAY records for the source files last.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938 956
939 957
def creatediff(basectx, ctx):
    """create a Differential Diff for the changes between basectx and ctx

    ``basectx`` is the first commit covered by the diff and ``ctx`` the last;
    they are the same changeset unless several commits are folded into one
    review.  Returns the diff dict from "differential.creatediff"; aborts if
    the API call yields no result.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
969 987
970 988
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.

    Two properties are stored: "hg:meta" (describing the tip commit, read
    back by ``getdiffmeta()``) and "local:commits" (one entry per commit).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                # parent of the whole range, not of the tip
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1014 1032
1015 1033
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` tuple: the "differential.revision.edit"
    response and the diff dict that backs it (new or reused).
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context git diffs to decide whether a new diff upload
        # is needed at all.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1134 1152
1135 1153
def userphids(ui, names):
    """convert user names to PHIDs

    Aborts if any of ``names`` does not exist on the server.
    """
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API reports success even when some usernames don't exist, so
    # detect the missing ones ourselves.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1151 1169
1152 1170
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker options translate into a single "reviewers.add"
    # transaction applied to every posted revision.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode = oldnode
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Map parents through already-rewritten commits so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(
                            [unfi[newnode]], diffmap[old.node()]
                        )
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1368 1386
1369 1387
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.  (The trailing space in b'Parent ' is
# intentional -- presumably to match the export header format; verify before
# changing.)
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1381 1399
1382 1400
def _confirmbeforesend(repo, revs, oldmap):
    """show a summary of what phabsend will do and prompt for confirmation

    ``oldmap`` is ``getoldnodedrevmap()`` output, used to label each changeset
    with its existing Differential Revision (or NEW).  Returns True to
    proceed, False if the user declined.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns the index of the chosen option: 0 is Yes.
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1410 1428
1411 1429
# Normalized Differential Revision status names (see ``_getstatusname()``)
# that the DREVSPEC query language accepts as bare symbols.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1420 1438
1421 1439
1422 1440 def _getstatusname(drev):
1423 1441 """get normalized status name from a Differential Revision"""
1424 1442 return drev[b'statusName'].replace(b' ', b'').lower()
1425 1443
1426 1444
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.  Prefix b':' expands to ancestors (dependency stack), b'+'/b'-'
# are set union/difference, b'&' is intersection.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1441 1459
1442 1460
def _tokenize(text):
    """yield (token-type, value, position) triples for a DREVSPEC bytestring

    ``value`` is the symbol text for b'symbol' tokens and None otherwise.
    Spaces are skipped, and a final (b'end', None, pos) token is always
    emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Longest run of non-special characters starting at pos.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1462 1480
1463 1481
def _parse(text):
    """parse a DREVSPEC bytestring into an AST tuple

    Raises ParseError if the whole input was not consumed (trailing junk).
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1469 1487
1470 1488
1471 1489 def _parsedrev(symbol):
1472 1490 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1473 1491 if symbol.startswith(b'D') and symbol[1:].isdigit():
1474 1492 return int(symbol[1:])
1475 1493 if symbol.isdigit():
1476 1494 return int(symbol)
1477 1495
1478 1496
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles.update(subsingles)
        # The operand of b':' is itself a root whose ancestors are wanted.
        ancestors.update(subsingles)
        ancestors.update(subancestors)
    else:
        # Operator node (and_/add/sub/group): merge all operand results.
        for operand in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(operand)
            singles.update(subsingles)
            ancestors.update(subancestors)
    return singles, ancestors
1499 1517
1500 1518
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None

        Results are memoized in ``prefetched`` under both id and phid keys.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # reverse so dependencies come before their dependents
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # guess that ancestors have nearby, smaller ids
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status symbol: filter the prefetched revisions by status
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1626 1644
1627 1645
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # drop empty sections; separate the rest with blank lines
    return b'\n\n'.join(section for section in sections if section)
1641 1659
1642 1660
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.

    When ``folded`` is False this is just ``getdescfromdrev(drev)``; when
    True, only the "Differential Revision:" line is added to (or updated in)
    ``ctx``'s existing description.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    m = _differentialrevisiondescre.search(ctx.description())
    if not m:
        return b'\n\n'.join([ctx.description(), uri])

    return _differentialrevisiondescre.sub(uri, ctx.description())
1668 1686
1669 1687
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}
    if len(localcommits) > 1:
        # folded review: each key is the hex node of a local commit
        return {bin(hexnode) for hexnode in localcommits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    hexnode = getdiffmeta(diff).get(b'node', b'')
    return {bin(hexnode) or None}
1701
1702
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit to read the metadata from.  The dict values
            # can't be passed to sorted() directly on Python 3 (dicts are
            # unorderable and raise TypeError when there is more than one
            # entry), so order deterministically by the hex node key
            # instead.  In the common --no-fold case there is exactly one
            # commit, so the choice is unambiguous.
            commit = min(props[b'local:commits'].items())[1]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1737 1770
1738 1771
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:
        # Parenthesize each spec so its operators can't bleed into the
        # b'+' union; a b':' prefix pulls in dependencies when stacking.
        if stack:
            formatted = [b'(:(%s))' % s for s in specs]
        else:
            formatted = [b'(%s)' % s for s in specs]

        drevs = querydrev(ui, b'+'.join(formatted))
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1758 1791
1759 1792
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".

    Only the latest diff of each Differential Revision is exported.
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1796 1829
1797 1830
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    revisions = _getdrevs(ui, opts.get(b'stack'), specs)

    def _write(patches):
        # Dump the raw patch text of every selected revision to the output.
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, revisions, _write)
1834 1867
1835 1868
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Callback for readpatch(): commits each patch in order. The first
        # patch is applied onto the current working directory parent(s); each
        # later patch is applied onto the previously created node, so a stack
        # imports as a linear chain.
        parents = repo[None].parents()

        # One transaction for the whole batch, so a failure mid-stack does
        # not leave a partial import behind.
        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain the next patch onto the node just created.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1895 1928
1896 1929
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # A comment, when given, is only attached to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1934 1967
1935 1968
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Prefer the "Differential Revision" URL from the commit description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )

    # Otherwise, fall back to a "D123"-style tag on this node, building the
    # URL from the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(tag):
            baseurl = ctx.repo().ui.config(b'phabricator', b'url')
            if not baseurl.endswith(b'/'):
                baseurl += b'/'
            return templateutil.hybriddict({b'url': baseurl + tag, b'id': tag,})

    return None
1958 1991
1959 1992
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # getdrevmap() maps revisions without an associated Differential Revision
    # to None (see the `drevid is not None` handling in phabstatusshowview);
    # don't query the server with a bogus id in that case.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1980 2013
1981 2014
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # Revisions reported by _underway(), in topological order.
    revs = repo.revs('sort(_underway(), topo)')
    # Maps each rev to its Differential Revision id, or None when unknown.
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # Several local revisions may map to the same drev, so group
            # revs by drev id rather than assuming a 1:1 mapping.
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # Fetch all needed drevs with a single conduit round trip, then invert
    # the grouping back into a rev -> drev lookup table.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Extension hook installed on the displayer below (via _exthook);
        # writes the drev URI and its color-labeled status for a changeset.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revisions that actually have an associated drev.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now