##// END OF EJS Templates
phabricator: remove *-argument from _getdrevs()...
Yuya Nishihara -
r45076:2d63a891 default
parent child Browse files
Show More
@@ -1,1918 +1,1918 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Extension helper collecting command/config/templatekeyword registrations
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect labels used when writing status output
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered through vcrcommand(),
# used by the test suite to record/replay HTTP traffic
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
161 161
162 162
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True (in addition to whatever ``orig`` reports) when a valid
    ``.arcconfig`` was found and parsed.
    """
    found = False
    settings = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings; convert back to local bytes
        settings = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        found = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig present; nothing to load
        pass

    # Translate the recognized .arcconfig keys into [phabricator] config
    overrides = util.sortdict()
    for arckey, cfgkey in [
        (b"repository.callsign", (b"phabricator", b"callsign")),
        (b"phabricator.uri", (b"phabricator", b"url")),
    ]:
        if arckey in settings:
            overrides[cfgkey] = settings[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or found  # Load .hg/hgrc
199 199
200 200
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, with an extra ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is passed, all HTTP traffic is recorded to (or
    replayed from) the vcr cassette at PATH, so the test suite can run
    without a live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: URI and method must match, and body parameters
        # must match; JSON payload values are compared structurally so that
        # dict key ordering does not cause spurious mismatches.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact the real conduit API token before it gets recorded
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies could also leak credentials into the recorded transcript
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does not play well with demandimport; import it eagerly
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own connection classes, not just
                        # the stdlib ones vcr knows about
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # depth=2 so signature errors are reported against fn, not inner
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
279 279
280 280
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            value = {True: b'true', False: b'false'}[value]  # Python -> PHP form
        # NB: exact-type dispatch (not isinstance) matches the original
        # behavior: subclasses of list/dict are treated as leaf values.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
306 306
307 307
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        groupname, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
336 336
337 337
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method (e.g. ``differential.querydiffs``).  Raises
    ``error.Abort`` when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Don't mutate the caller's dict when injecting the auth token
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the configured curl command, feeding the form data
        # on stdin (-d @-)
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Use the builtin HTTP library
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        # json.loads only returns unicode strings; convert back to local bytes
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
381 381
382 382
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Pretty-print with stable key ordering so output is diffable in tests
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
406 406
407 407
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the resolved PHID so later calls skip the conduit round-trip
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
427 427
428 428
# Matches a local tag of the form "D123" (whole string)
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url ending in D123>" line in a
# commit message; captures the full URL and the numeric revision id
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
433 433
434 434
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ``force`` is 1 when the association came from the commit message (which
    # is authoritative) and 0 when it came from a local "D123" tag (which
    # must be verified against Phabricator below)
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the hg node recorded in a diff's metadata (may be missing)
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Removing a tag is done by re-tagging nullid locally
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
528 528
529 529
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        mapping[rev] = None
        ctx = repo[rev]
        # The commit message association takes precedence over local tags
        msgmatch = _differentialrevisiondescre.search(ctx.description())
        if msgmatch:
            mapping[rev] = int(msgmatch.group('id'))
            continue
        # Fall back to local tags like "D123"
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                mapping[rev] = int(tagmatch.group(1))
                break

    return mapping
551 551
552 552
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; labels are only for colorizing
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
561 561
562 562
class DiffChangeType(object):
    """Constants for the per-file change type in a Differential diff."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
572 572
573 573
class DiffFileType(object):
    """Constants for the file content type in a Differential diff."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
578 578
579 579
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Hunk boundaries in the old and new versions of the file
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body (diff lines without the @@ header)
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
593 593
594 594
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths for moves/copies originating from this file
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Mirror every ``new:*`` metadata entry under the ``old:*`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the old unix file mode (git-style string)."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (git-style string)."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a ``phabhunk`` (serialized to a dict) to this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
634 634
635 635
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: serialized phabchange}
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Append a ``phabchange`` (serialized to a dict) to this diff."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
662 662
663 663
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Computes the git-style hunks for ``fname`` between ``ctx.p1()`` and
    ``ctx`` and appends them (with line-count stats) to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so each hunk carries essentially the whole file around it
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Only one file matches, so a single next() gets its hunks
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the "@@ ..." header; the rest is the hunk body
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
692 692
693 693
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by ``file.allocate``.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server already has (resumed upload)
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
719 719
720 720
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID.  Raises ``error.Abort`` if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID yet: small file, single-shot upload
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated up-front: server wants a chunked upload
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
756 756
757 757
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        # Contents identical: reuse the new side's metadata for the old side
        pchange.copynewmetadatatoold()
781 781
782 782
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the file content and records its PHID, size and (when guessable)
    mime type; images get the IMAGE file type so the web UI previews them.
    """
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
795 795
796 796
# Copied from mercurial/patch.py
# Maps a file's flags ('l' symlink, 'x' executable, '' regular) to the
# git-style mode string used in diff metadata
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
799 799
800 800
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Let the user know why this file is being treated as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    else:
        return False
814 814
815 815
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    ``removed`` is the list of paths deleted in ``ctx`` relative to
    ``ctx.p1()``; each becomes a DELETE change on ``pdiff``.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the parent filectx up once and reuse it (the original code
        # performed the ctx.p1()[fname] lookup twice)
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            # Text file: emit the deletion hunks
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
828 828
829 829
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    ``modified`` is the list of paths changed in ``ctx`` relative to
    ``ctx.p1()``; each becomes a CHANGE change on ``pdiff``.
    """
    for fname in modified:
        fctx = ctx[fname]
        # Use the file from the parent *changectx*, not fctx.p1(): the
        # filelog parent can be a different revision (e.g. across merges),
        # and the mode comparison below already reads ctx.p1().  This also
        # keeps the binary-upload path consistent with the mode path.
        oldfctx = ctx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
854 854
855 855
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — the source of a detected move is
    dropped from it so ``addremoved`` won't emit a duplicate DELETE.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        # renamed() is truthy when the add is a rename/copy; renamed[0] is
        # the source path in the parent
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Second+ destination of an already-recorded move
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The *_AWAY source-side changes are emitted after all destinations are
    # known, so their awayPaths lists are complete
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
925 925
926 926
def creatediff(ctx):
    """create a Differential Diff

    Builds a ``phabdiff`` from the changes between ``ctx.p1()`` and ``ctx``
    and submits it via the ``differential.creatediff`` conduit call,
    returning the server's diff object.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
952 952
953 953
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Issues two "differential.setdiffproperty" calls: one storing the
    "hg:meta" property (flat commit metadata) and one storing the
    "local:commits" property (per-node metadata keyed by hex node),
    both consumed later by getdiffmeta().
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
990 990
991 991
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    If comment is not None, it is posted alongside a newly uploaded diff.

    Returns a (revision, diff) pair, where revision is the conduit response
    of "differential.revision.edit" and diff is the diff that the revision
    now points at (newly created or the reused olddiff).
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs so metadata-only changes are detected
        # as "no new diff needed".
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1061 1061
1062 1062
def userphids(ui, names):
    """convert user names to PHIDs

    Aborts when any of the (case-insensitive) names cannot be resolved,
    since "user.search" silently omits unknown usernames instead of
    erroring out.
    """
    lowered = [name.lower() for name in names]
    response = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    entries = response[b'data']
    # The API does not error on unknown usernames; detect them ourselves.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1078 1078
1079 1079
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort() # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the reviewer transaction shared by every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers are expressed as "blocking(PHID)" markers.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Remember the working-directory parent so it can be moved to
            # the rewritten node at the end.
            wnode = unfi[b'.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1291 1291
1292 1292
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: readpatch() iterates this dict in order when emitting headers, so
# the ordering here is significant.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1304 1304
1305 1305
def _confirmbeforesend(repo, revs, oldmap):
    """show a one-line summary per changeset, then prompt; True if confirmed

    oldmap maps node -> (oldnode, olddiff, drevid); a known drevid is shown
    as "D<id>" and a missing one as "NEW".
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        firstline = ctx.description().splitlines()[0]
        summary = _(b'%s - %s: %s\n') % (
            drevdesc,
            ui.label(bytes(ctx), b'phabricator.node'),
            ui.label(firstline, b'phabricator.desc'),
        )
        ui.write(summary)

    # promptchoice returns the selected index: 0 for "Yes", 1 for "No".
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1333 1333
1334 1334
# Normalized status names accepted by the DREVSPEC query language; compare
# against _getstatusname() output (lowercase, spaces removed).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1343 1343
1344 1344
1345 1345 def _getstatusname(drev):
1346 1346 """get normalized status name from a Differential Revision"""
1347 1347 return drev[b'statusName'].replace(b' ', b'').lower()
1348 1348
1349 1349
# Small language to specify differential revisions. Support symbols: (), :X,
# +, - and &.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1364 1364
1365 1365
def _tokenize(text):
    """tokenize DREVSPEC bytes into (token-type, value, position) triples

    Yields (b'symbol', name, pos) for maximal runs of non-special bytes,
    (char, None, pos) for each special character in ``():+-&`` (spaces are
    skipped), and a final (b'end', None, pos) terminator.
    """
    view = memoryview(text) # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Longest run of non-special bytes starting at pos (may be empty).
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1385 1385
1386 1386
def _parse(text):
    """parse a DREVSPEC string into a parse tree

    Raises ParseError when trailing input remains after a complete
    expression has been consumed.
    """
    drevparser = parser.parser(_elements)
    tree, pos = drevparser.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1392 1392
1393 1393
1394 1394 def _parsedrev(symbol):
1395 1395 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1396 1396 if symbol.startswith(b'D') and symbol[1:].isdigit():
1397 1397 return int(symbol[1:])
1398 1398 if symbol.isdigit():
1399 1399 return int(symbol)
1400 1400
1401 1401
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drevid = _parsedrev(tree[1])
        if drevid:
            drevs.add(drevid)
    elif op == b'ancestors':
        # ":X" — X itself and everything below it must be prefetched.
        sub, subancestors = _prefetchdrevs(tree[1])
        drevs |= sub
        ancestordrevs |= sub
        ancestordrevs |= subancestors
    else:
        # Binary/group operators: union the results of all operands.
        for subtree in tree[1:]:
            sub, subancestors = _prefetchdrevs(subtree)
            drevs |= sub
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
1422 1422
1423 1423
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # A request identifies drevs either by id or by phid; the first
        # entry doubles as the cache key.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges downwards to the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Guess that a stack is at most batchsize deep and fetch that range.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names only filter the already-selected validids.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Map tree op name directly to the operator-module function.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1549 1549
1550 1550
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Empty sections are dropped so no blank paragraphs are produced.
    return b'\n\n'.join(section for section in sections if section)
1564 1564
1565 1565
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # NOTE(review): sorting dict values works only when there is a
            # single commit entry; with multiple entries this would raise
            # TypeError on Python 3 (dicts are unorderable) — confirm the
            # caller guarantees one commit per diff.
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is lost here; assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1633 1633
1634 1634
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    specs is a sequence of DREVSPEC strings. If stack is true, each spec is
    expanded with the ``:`` prefix operator so its whole dependency stack is
    selected. The specs are joined with ``+`` (union).

    See ``hg help phabread`` for how to specify each DREVSPEC.

    Raises Abort when no specs are given or the query matches nothing.
    """
    # The source contained an unresolved old/new pair for the signature
    # (*specs vs specs); the sequence-taking form is the correct final one —
    # all callers pass the tuple of command arguments as a single value.
    if specs:

        def _formatspec(s):
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        spec = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1654 1654
1655 1655
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # Use the latest (highest-numbered) diff of each revision.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1692 1692
1693 1693
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    # Print each generated patch straight to the ui.
    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1730 1730
1731 1731
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()

    # Callback passed to readpatch(): applies each patch on top of the
    # previously imported node so stacks chain correctly.
    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None, # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1791 1791
1792 1792
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    for i, drev in enumerate(drevs):
        # The optional comment is attached only to the last revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1830 1830
1831 1831
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision: <url>" line that phabsend
    # amends into the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )
    # Fallback: a local "D123"-style tag on this node; build the URL from the
    # configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag})
    return None
1854 1854
1855 1855
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        # getdrevmap() may not have an entry for this rev at all.
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # Ask Phabricator for the revision and pick the matching one, if any.
    results = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in results:
        if int(drev[b'id']) != drevid:
            continue
        return templateutil.hybriddict(
            {
                b'url': drev[b'uri'],
                b'status': drev[b'statusName'],
            }
        )
    return None
1876 1876
1877 1877
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # Unfinished ("underway") changesets, in topological order.
    revs = repo.revs('sort(_underway(), topo)')
    # Map each rev to its Differential revision id (None when no D-number
    # could be derived for that rev).
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # Several local revs may map to the same Differential revision.
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One batched conduit call for all known Differential ids, then invert
    # the mapping so each rev can look up its status dict directly.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked by the displayer for each changeset: print the
        # Differential URL and a color-labeled status name.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Drop revs with no associated Differential revision from the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    # NOTE(review): relies on the displayer's private per-changeset hook;
    # must be set before displaygraph() runs.
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now