##// END OF EJS Templates
phabricator: add basectx arguments to file related `phabsend` utilities...
Matt Harbison -
r45100:53d75fde default
parent child Browse files
Show More
@@ -1,1918 +1,1926 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points under the names the
# extension loader expects to find at module level.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Labels used by the phabricator template keywords and the phabstatus view.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Flag appended to every command registered via vcrcommand(); lets the test
# suite record/replay the conduit HTTP traffic of a command invocation.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
161 161
162 162
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True (in addition to whatever ``orig`` reports) when a valid
    ``.arcconfig`` was found and applied.
    """
    found = False
    settings = {}

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        settings = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(raw),
        )
        found = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # No .arcconfig present; nothing to do.
        pass

    # Map the relevant .arcconfig keys onto [phabricator] config items.
    overrides = util.sortdict()
    for arckey, cfgkey in [
        (b"repository.callsign", b"callsign"),
        (b"phabricator.uri", b"url"),
    ]:
        if arckey in settings:
            overrides[(b"phabricator", cfgkey)] = settings[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or found  # Load .hg/hgrc
199 199
200 200
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose conduit HTTP traffic can be recorded/replayed.

    Behaves like ``@command`` but appends the ``--test-vcr`` flag; when that
    flag is given, all HTTP requests go through a ``vcr`` cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr matcher: requests match when URL, method and the parsed
        # form parameters agree; JSON-valued parameters are compared as
        # parsed objects so key ordering doesn't matter.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Redact the conduit API token before it is written to the cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies may carry session credentials; never record them.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # --test-vcr value; falsy means run the command normally.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's dynamic imports fight with demandimport, so disable
                # demandimport while loading and running it.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity so help text and command
        # registration see the original name and docstring.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
279 279
280 280
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            value = b'true' if value else b'false'  # Python -> PHP form
        # Only exact list/dict instances recurse; subclasses are leaves,
        # matching the original type()-keyed dispatch.
        vtype = type(value)
        if vtype is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif vtype is dict:
            pairs = list(value.items())
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
306 306
307 307
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
336 336
337 337
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Don't mutate the caller's dict; the API token rides inside the params.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the user-configured curl, feeding the form data
        # on stdin ("-d @-").
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP library; honors the [auth] config via authinfo.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # json only speaks unicode; convert on the way in and back to local
    # bytes on the way out so callers deal in bytes throughout.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
381 381
382 382
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+ and only returns unicode
    # strings, so convert at both boundaries.
    tolocal = (
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x
    )
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))

    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    rendered = json.dumps(
        result, sort_keys=True, indent=2, separators=(u',', u': ')
    )
    ui.write(b'%s\n' % encoding.unitolocal(rendered))
406 406
407 407
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    phid = data[0][b'phid']
    # Cache the lookup for the remainder of this process.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
427 427
428 428
# Local tag names like "D123" that mark a changeset as sent to Phabricator.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# The "Differential Revision: <url>D123" line added to commit messages.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
433 433
434 434
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the association comes from a tag only,
                        # so it must be confirmed against Phabricator below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is authoritative.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Node recorded in a diff's metadata, or None when absent/empty.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
528 528
529 529
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def drevid(ctx):
        # The commit message association wins over local "D123" tags.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: drevid(repo[rev]) for rev in revs}
551 551
552 552
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
561 561
562 562
class DiffChangeType(object):
    # Constants for a Differential change's ``type`` field (mirrors
    # Phabricator's ArcanistDiffChangeType values).
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
572 572
573 573
class DiffFileType(object):
    # Constants for a Differential change's ``fileType`` field.
    TEXT = 1
    IMAGE = 2
    BINARY = 3
578 578
579 579
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Field names mirror the Conduit API wire format, hence the camelCase.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
593 593
594 594
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff.  Each one represents one file in a diff.
    """

    # Field names mirror the Conduit API wire format, hence the camelCase.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:`` metadata entry under the ``old:`` key."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
634 634
635 635
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes.  Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the file path it affects."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
662 662
663 663
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A huge context makes the whole file show up as a single hunk.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for (oldOffset, oldLength, newOffset, newLength), lines in fhunks:
        # Strip the "@@ ..." line; the offsets carry that information.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        hunk = phabhunk(
            oldOffset,
            oldLength,
            newOffset,
            newLength,
            corpus,
            addLines,
            delLines,
        )
        pchange.addhunk(hunk)
692 692
693 693
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already has this byte range.
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': start,
                    b'data': base64.b64encode(data[start:end]),
                    b'dataEncoding': b'base64',
                },
            )
719 719
720 720
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated, so the file must be sent in chunks.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
756 756
757 757
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical; if it's left as IMAGE/BINARY the web UI
        # might try to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
781 781
782 782
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # Images get a dedicated type so the web UI renders them inline.
        pchange.fileType = DiffFileType.IMAGE
795 795
796 796
# Copied from mercurial/patch.py: git-style mode strings keyed by the
# file context's flags (symlink, executable, regular).
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
799 799
800 800
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
814 814
815 815
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    p1 = basectx.p1()
    for fname in removed:
        oldfctx = p1[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary/non-UTF-8 removals carry no text hunks.
        istext = not oldfctx.isbinary() and not notutf8(oldfctx)
        if istext:
            maketext(pchange, basectx, ctx, fname)
        pdiff.addchange(pchange)
828 828
829 829
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    p1 = basectx.p1()
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = p1[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Short-circuit order matters: notutf8() prints a warning as a
        # side effect, once per side at most.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
854 854
855 855
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappears: this is a move.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                # The caller's list is mutated on purpose so addremoved()
                # won't also emit a DELETE for this file.
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Source already moved elsewhere: a move plus extra copies.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Away-side records are emitted only after every add is processed, so
    # MULTICOPY promotion above has already taken effect.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
925 925
926 926
def creatediff(basectx, ctx):
    """create a Differential Diff

    The diff covers the range ``basectx::ctx``; for a single changeset,
    ``basectx`` is the same context as ``ctx``.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Assemble the payload for the "differential.creatediff" conduit API.
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        # Report the full range when the diff spans multiple changesets.
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
952 956
953 957
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    # Commit metadata as phabsend records it.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }

    # The same information in the shape "arc" uses.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }

    # Attach both properties, in this order, via separate conduit calls.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        params = {
            b'diff_id': diffid,
            b'name': name,
            b'data': templatefilters.json(data),
        }
        callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
990 994
991 995
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    basectx = ctx
    repo = ctx.repo()
    if oldnode:
        # Huge context so the comparison effectively sees whole files.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        oldbasectx = oldctx
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
1061 1069
1062 1070
def userphids(ui, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1078 1086
1079 1087
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1291 1299
1292 1300
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1304 1312
1305 1313
def _confirmbeforesend(repo, revs, oldmap):
    """show the changesets about to be sent and prompt; return True to send"""
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known revisions show their D-number; unknown ones show "NEW".
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    # promptchoice returns 0 for the first choice ("Yes").
    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True
1333 1341
1334 1342
# Normalized status names (see _getstatusname) recognized by the DREVSPEC
# query language.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1343 1351
1344 1352
def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()
1348 1356
1349 1357
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1364 1372
1365 1373
def _tokenize(text):
    """yield (token-type, value, position) triples for the DREVSPEC language"""
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume a run of non-special bytes as one symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1385 1393
1386 1394
def _parse(text):
    """parse a DREVSPEC string into a tree; abort on trailing garbage"""
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1392 1400
1393 1401
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    # Strip an optional leading b'D', then accept pure digits only.
    digits = symbol[1:] if symbol.startswith(b'D') else symbol
    if digits.isdigit():
        return int(digits)
    return None
1400 1408
1401 1409
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        # An ancestors query needs the revision itself plus its ancestors.
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        # Operators: recurse into every operand.
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
1422 1430
1423 1431
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" is the (frozen) output of the
    "differential.query" conduit API; among other keys it carries "id",
    "phid", "statusName", "title", "summary", "testPlan", "uri", and
    "auxiliary" (whose "phabricator:depends-on" list encodes the stack).
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1549 1557
1550 1558
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    # Empty sections are dropped entirely.
    return b'\n\n'.join(filter(None, parts))
1564 1572
1565 1573
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, e.g.::

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", which carries
    "author"/"authorEmail"/"time"/"commit"/"parents"/"branch" keys per
    commit. Note: metadata extracted from "local:commits" will lose time
    zone information.

    Missing fields fall back to the diff object's own "dateCreated",
    "branch" and "sourceControlBaseRevision" values.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Use the first commit recorded by "arc".
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1633 1641
1634 1642
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if len(specs) > 0:

        def _formatspec(s):
            # --stack expands each spec to its ancestors via the ':' prefix.
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s

        spec = b'+'.join(pycompat.maplist(_formatspec, specs))

        drevs = querydrev(ui, spec)
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
1654 1662
1655 1663
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        # Always use the most recent diff of a revision.
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
1692 1700
1693 1701
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
1730 1738
1731 1739
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain each imported changeset onto the previous one.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
1791 1799
1792 1800
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # One edit transaction per requested status flag
    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # A comment, if given, is only attached to the last revision
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1830 1838
1831 1839
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Preferred source: the Differential URL embedded in the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group('url'),
                b'id': b"D%s" % match.group('id'),
            }
        )

    # Fall back to a local D* tag, synthesizing the URL from the configured
    # Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        baseurl = ctx.repo().ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({b'url': baseurl + tag, b'id': tag})
    return None
1854 1862
1855 1863
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        # Map the local revision to its Differential Revision number.
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None

    # Query Phabricator and pick the matching revision out of the reply.
    result = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    found = next((d for d in result if int(d[b'id']) == drevid), None)
    if found is None:
        return None
    return templateutil.hybriddict(
        {b'url': found[b'uri'], b'status': found[b'statusName']}
    )
1876 1884
1877 1885
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    # Candidate changesets: the show extension's "underway" set, in
    # topological order.
    revs = repo.revs('sort(_underway(), topo)')
    # rev -> Differential Revision number (None when no D-number is known)
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            # Several local revisions may map to the same Differential
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    # One batched Conduit call covering every known Differential Revision
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook called by the displayer per changeset: prints the
        # Differential URI followed by the (color-labeled) status name.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Drop revisions without a Differential before building the graph, so
    # the phabstatus hook never sees a rev missing from drevsbyrev.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now