##// END OF EJS Templates
phabricator: record all local commits used to create a Differential revision...
Matt Harbison -
r45133:0437959d default
parent child Browse files
Show More
@@ -1,1939 +1,1948 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 encoding,
66 66 error,
67 67 exthelper,
68 68 graphmod,
69 69 httpconnection as httpconnectionmod,
70 70 localrepo,
71 71 logcmdutil,
72 72 match,
73 73 mdiff,
74 74 obsutil,
75 75 parser,
76 76 patch,
77 77 phases,
78 78 pycompat,
79 79 scmutil,
80 80 smartset,
81 81 tags,
82 82 templatefilters,
83 83 templateutil,
84 84 url as urlmod,
85 85 util,
86 86 )
87 87 from mercurial.utils import (
88 88 procutil,
89 89 stringutil,
90 90 )
91 91 from . import show
92 92
93 93
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Central registry for the commands, config items and template keywords
# declared by this extension.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
eh.configitem(
    b'phabimport', b'obsolete', default=False,
)

# Color/effect definitions for the 'phabricator.*' output labels used by
# this extension's commands.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra flag appended to every command registered via vcrcommand(); lets the
# test suite record/replay Conduit HTTP traffic to a cassette file.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
162 162
163 163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Wraps ``localrepo.loadhgrc``.  Returns True when any configuration was
    loaded, either from ``.arcconfig`` here or by the wrapped function.
    """
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory; nothing to load
        pass

    cfg = util.sortdict()

    # Map the two recognized .arcconfig keys onto [phabricator] config items.
    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
200 200
201 201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like @command does, adding the --test-vcr flag.

    When --test-vcr is passed, HTTP traffic is recorded to (or replayed
    from) the named cassette file via the ``vcr`` package.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: compare URI, method and form
        # parameters, treating JSON-valued parameters as unordered payloads.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the Conduit API token before the request is recorded.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies before the response is recorded.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # import vcr with hg's demand importer disabled
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # route urlmod's HTTP(S) connections through vcr
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # preserve the wrapped function's identity for help/registration
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280 280
281 281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # PHP-style boolean form values
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # exact-type dispatch, matching PHP's container handling
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = list(value.items())
        else:
            # scalar leaf: record it under the accumulated key
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
307 307
308 308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # look up the [auth] group whose prefix matches the configured URL
    match = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None
    if match:
        groupname, authconfig = match
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authconfig.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337 337
338 338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``); the
    token from readurltoken() is injected into a copy of ``params``.
    Raises error.Abort when the server response carries an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # copy so the caller's dict is not mutated by the token injection
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # user-configured curl command: feed the form data on stdin
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # builtin HTTP library
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
382 382
383 383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # sorted keys keep the output stable across runs
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407 407
408 408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # resolve the callsign to a PHID via Conduit
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None
    repophid = matches[0][b'phid']
    # cache the lookup in the config for the rest of this process
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428 428
429 429
# Matches a local tag of the form "D123" (no leading zeroes).
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>D<id>" line in a commit message.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434 434
435 435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: association must be confirmed below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message itself claims the association
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # a diff's metadata may record the local node it was created from
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # point the stale local tag at nullid, effectively removing it
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
529 529
530 530
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    drevids = {}
    for rev in revs:
        ctx = repo[rev]
        # a "Differential Revision:" line in the description wins
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drevids[rev] = int(descmatch.group('id'))
            continue
        # otherwise fall back to a "D123"-style local tag, if any
        found = None
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                found = int(tagmatch.group(1))
                break
        drevids[rev] = found

    return drevids
552 552
553 553
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the text is wanted here
    return b''.join(chunk for chunk, _label in chunks)
562 562
563 563
class DiffChangeType(object):
    """Constants for the ``type`` of a Differential change (phabchange)."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
573 573
574 574
class DiffFileType(object):
    """Constants for the ``fileType`` of a Differential change (phabchange)."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
579 579
580 580
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # hunk body text; maketext fills this from the diff lines after the
    # "@@" header line
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
594 594
595 595
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate every 'new:' metadata key under the 'old:' prefix"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode for this change"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode for this change"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk and fold its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
635 635
636 636
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange in this diff, keyed by its currentPath"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
663 663
664 664
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # very large context so each hunk effectively carries the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the "@@" header; the corpus is everything after it
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # compute added/deleted line counts for this hunk
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
693 693
694 694
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # the server reports the byte ranges and which are already complete
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # this byte range is already on the server
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
720 720
721 721
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID assigned by the server; aborts if no PHID
    could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID from allocate: single-shot upload of the whole file
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # allocate handed back a PHID: upload the missing chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
757 757
758 758
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
782 782
783 783
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    # upload the content and record its PHID plus basic metadata
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    # images get a dedicated file type so the web UI can render them
    if mimeguess.startswith(b'image/'):
        pchange.fileType = DiffFileType.IMAGE
796 796
797 797
# Copied from mercurial/patch.py
# Maps hg file flags ('l' symlink, 'x' executable, '' regular) to git modes.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
800 800
801 801
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # tell the user why this file will be treated as binary
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    else:
        return False
815 815
816 816
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for path in removed:
        prior = basectx.p1()[path]
        change = phabchange(
            currentPath=path, oldPath=path, type=DiffChangeType.DELETE
        )
        change.addoldmode(gitmode[prior.flags()])
        # text content is only attached for files that are valid UTF-8 text
        if not prior.isbinary() and not notutf8(prior):
            maketext(change, basectx, ctx, path)

        pdiff.addchange(change)
829 829
830 830
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # only record modes when they actually changed
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            # either side being binary (or non-UTF-8) makes the change binary
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
855 855
856 856
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: mutates ``removed`` by dropping files that turn out to be the
    source of a move.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # determine the copy/rename source, if any
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source file is gone: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # source already moved elsewhere too: upgrade to multicopy
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # emit the synthesized COPY_AWAY/MOVE_AWAY changes after all adds
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
938 938
939 939
940 940 def creatediff(basectx, ctx):
941 941 """create a Differential Diff"""
942 942 repo = ctx.repo()
943 943 repophid = getrepophid(repo)
944 944 # Create a "Differential Diff" via "differential.creatediff" API
945 945 pdiff = phabdiff(
946 946 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
947 947 branch=b'%s' % ctx.branch(),
948 948 )
949 949 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
950 950 # addadded will remove moved files from removed, so addremoved won't get
951 951 # them
952 952 addadded(pdiff, basectx, ctx, added, removed)
953 953 addmodified(pdiff, basectx, ctx, modified)
954 954 addremoved(pdiff, basectx, ctx, removed)
955 955 if repophid:
956 956 pdiff.repositoryPHID = repophid
957 957 diff = callconduit(
958 958 repo.ui,
959 959 b'differential.creatediff',
960 960 pycompat.byteskwargs(attr.asdict(pdiff)),
961 961 )
962 962 if not diff:
963 963 if basectx != ctx:
964 964 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
965 965 else:
966 966 msg = _(b'cannot create diff for %s') % ctx
967 967 raise error.Abort(msg)
968 968 return diff
969 969
970 970
971 def writediffproperties(ctx, diff):
972 """write metadata to diff so patches could be applied losslessly"""
971 def writediffproperties(ctxs, diff):
972 """write metadata to diff so patches could be applied losslessly
973
974 ``ctxs`` is the list of commits that created the diff, in ascending order.
975 The list is generally a single commit, but may be several when using
976 ``phabsend --fold``.
977 """
973 978 # creatediff returns with a diffid but query returns with an id
974 979 diffid = diff.get(b'diffid', diff.get(b'id'))
980 basectx = ctxs[0]
981 tipctx = ctxs[-1]
982
975 983 params = {
976 984 b'diff_id': diffid,
977 985 b'name': b'hg:meta',
978 986 b'data': templatefilters.json(
979 987 {
980 b'user': ctx.user(),
981 b'date': b'%d %d' % ctx.date(),
982 b'branch': ctx.branch(),
983 b'node': ctx.hex(),
984 b'parent': ctx.p1().hex(),
988 b'user': tipctx.user(),
989 b'date': b'%d %d' % tipctx.date(),
990 b'branch': tipctx.branch(),
991 b'node': tipctx.hex(),
992 b'parent': basectx.p1().hex(),
985 993 }
986 994 ),
987 995 }
988 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
996 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
989 997
998 commits = {}
999 for ctx in ctxs:
1000 commits[ctx.hex()] = {
1001 b'author': stringutil.person(ctx.user()),
1002 b'authorEmail': stringutil.email(ctx.user()),
1003 b'time': int(ctx.date()[0]),
1004 b'commit': ctx.hex(),
1005 b'parents': [ctx.p1().hex()],
1006 b'branch': ctx.branch(),
1007 }
990 1008 params = {
991 1009 b'diff_id': diffid,
992 1010 b'name': b'local:commits',
993 b'data': templatefilters.json(
994 {
995 ctx.hex(): {
996 b'author': stringutil.person(ctx.user()),
997 b'authorEmail': stringutil.email(ctx.user()),
998 b'time': int(ctx.date()[0]),
999 b'commit': ctx.hex(),
1000 b'parents': [ctx.p1().hex()],
1001 b'branch': ctx.branch(),
1002 },
1003 }
1004 ),
1011 b'data': templatefilters.json(commits),
1005 1012 }
1006 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
1013 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1007 1014
1008 1015
1009 1016 def createdifferentialrevision(
1010 1017 ctx,
1011 1018 revid=None,
1012 1019 parentrevphid=None,
1013 1020 oldnode=None,
1014 1021 olddiff=None,
1015 1022 actions=None,
1016 1023 comment=None,
1017 1024 ):
1018 1025 """create or update a Differential Revision
1019 1026
1020 1027 If revid is None, create a new Differential Revision, otherwise update
1021 1028 revid. If parentrevphid is not None, set it as a dependency.
1022 1029
1023 1030 If oldnode is not None, check if the patch content (without commit message
1024 1031 and metadata) has changed before creating another diff.
1025 1032
1026 1033 If actions is not None, they will be appended to the transaction.
1027 1034 """
1028 1035 basectx = ctx
1029 1036 repo = ctx.repo()
1030 1037 if oldnode:
1031 1038 diffopts = mdiff.diffopts(git=True, context=32767)
1032 1039 oldctx = repo.unfiltered()[oldnode]
1033 1040 oldbasectx = oldctx
1034 1041 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1035 1042 oldbasectx, oldctx, diffopts
1036 1043 )
1037 1044 else:
1038 1045 neednewdiff = True
1039 1046
1040 1047 transactions = []
1041 1048 if neednewdiff:
1042 1049 diff = creatediff(basectx, ctx)
1043 1050 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1044 1051 if comment:
1045 1052 transactions.append({b'type': b'comment', b'value': comment})
1046 1053 else:
1047 1054 # Even if we don't need to upload a new diff because the patch content
1048 1055 # does not change. We might still need to update its metadata so
1049 1056 # pushers could know the correct node metadata.
1050 1057 assert olddiff
1051 1058 diff = olddiff
1052 writediffproperties(ctx, diff)
1059 writediffproperties([ctx], diff)
1053 1060
1054 1061 # Set the parent Revision every time, so commit re-ordering is picked-up
1055 1062 if parentrevphid:
1056 1063 transactions.append(
1057 1064 {b'type': b'parents.set', b'value': [parentrevphid]}
1058 1065 )
1059 1066
1060 1067 if actions:
1061 1068 transactions += actions
1062 1069
1063 1070 # Parse commit message and update related fields.
1064 1071 desc = ctx.description()
1065 1072 info = callconduit(
1066 1073 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1067 1074 )
1068 1075 for k, v in info[b'fields'].items():
1069 1076 if k in [b'title', b'summary', b'testPlan']:
1070 1077 transactions.append({b'type': k, b'value': v})
1071 1078
1072 1079 params = {b'transactions': transactions}
1073 1080 if revid is not None:
1074 1081 # Update an existing Differential Revision
1075 1082 params[b'objectIdentifier'] = revid
1076 1083
1077 1084 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1078 1085 if not revision:
1079 1086 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1080 1087
1081 1088 return revision, diff
1082 1089
1083 1090
1084 1091 def userphids(ui, names):
1085 1092 """convert user names to PHIDs"""
1086 1093 names = [name.lower() for name in names]
1087 1094 query = {b'constraints': {b'usernames': names}}
1088 1095 result = callconduit(ui, b'user.search', query)
1089 1096 # username not found is not an error of the API. So check if we have missed
1090 1097 # some names here.
1091 1098 data = result[b'data']
1092 1099 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1093 1100 unresolved = set(names) - resolved
1094 1101 if unresolved:
1095 1102 raise error.Abort(
1096 1103 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1097 1104 )
1098 1105 return [entry[b'phid'] for entry in data]
1099 1106
1100 1107
1101 1108 @vcrcommand(
1102 1109 b'phabsend',
1103 1110 [
1104 1111 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1105 1112 (b'', b'amend', True, _(b'update commit messages')),
1106 1113 (b'', b'reviewer', [], _(b'specify reviewers')),
1107 1114 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1108 1115 (
1109 1116 b'm',
1110 1117 b'comment',
1111 1118 b'',
1112 1119 _(b'add a comment to Revisions with new/updated Diffs'),
1113 1120 ),
1114 1121 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1115 1122 ],
1116 1123 _(b'REV [OPTIONS]'),
1117 1124 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1118 1125 )
1119 1126 def phabsend(ui, repo, *revs, **opts):
1120 1127 """upload changesets to Phabricator
1121 1128
1122 1129 If there are multiple revisions specified, they will be send as a stack
1123 1130 with a linear dependencies relationship using the order specified by the
1124 1131 revset.
1125 1132
1126 1133 For the first time uploading changesets, local tags will be created to
1127 1134 maintain the association. After the first time, phabsend will check
1128 1135 obsstore and tags information so it can figure out whether to update an
1129 1136 existing Differential Revision, or create a new one.
1130 1137
1131 1138 If --amend is set, update commit messages so they have the
1132 1139 ``Differential Revision`` URL, remove related tags. This is similar to what
1133 1140 arcanist will do, and is more desired in author-push workflows. Otherwise,
1134 1141 use local tags to record the ``Differential Revision`` association.
1135 1142
1136 1143 The --confirm option lets you confirm changesets before sending them. You
1137 1144 can also add following to your configuration file to make it default
1138 1145 behaviour::
1139 1146
1140 1147 [phabsend]
1141 1148 confirm = true
1142 1149
1143 1150 phabsend will check obsstore and the above association to decide whether to
1144 1151 update an existing Differential Revision, or create a new one.
1145 1152 """
1146 1153 opts = pycompat.byteskwargs(opts)
1147 1154 revs = list(revs) + opts.get(b'rev', [])
1148 1155 revs = scmutil.revrange(repo, revs)
1149 1156 revs.sort() # ascending order to preserve topological parent/child in phab
1150 1157
1151 1158 if not revs:
1152 1159 raise error.Abort(_(b'phabsend requires at least one changeset'))
1153 1160 if opts.get(b'amend'):
1154 1161 cmdutil.checkunfinished(repo)
1155 1162
1156 1163 # {newnode: (oldnode, olddiff, olddrev}
1157 1164 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1158 1165
1159 1166 confirm = ui.configbool(b'phabsend', b'confirm')
1160 1167 confirm |= bool(opts.get(b'confirm'))
1161 1168 if confirm:
1162 1169 confirmed = _confirmbeforesend(repo, revs, oldmap)
1163 1170 if not confirmed:
1164 1171 raise error.Abort(_(b'phabsend cancelled'))
1165 1172
1166 1173 actions = []
1167 1174 reviewers = opts.get(b'reviewer', [])
1168 1175 blockers = opts.get(b'blocker', [])
1169 1176 phids = []
1170 1177 if reviewers:
1171 1178 phids.extend(userphids(repo.ui, reviewers))
1172 1179 if blockers:
1173 1180 phids.extend(
1174 1181 map(
1175 1182 lambda phid: b'blocking(%s)' % phid,
1176 1183 userphids(repo.ui, blockers),
1177 1184 )
1178 1185 )
1179 1186 if phids:
1180 1187 actions.append({b'type': b'reviewers.add', b'value': phids})
1181 1188
1182 1189 drevids = [] # [int]
1183 1190 diffmap = {} # {newnode: diff}
1184 1191
1185 1192 # Send patches one by one so we know their Differential Revision PHIDs and
1186 1193 # can provide dependency relationship
1187 1194 lastrevphid = None
1188 1195 for rev in revs:
1189 1196 ui.debug(b'sending rev %d\n' % rev)
1190 1197 ctx = repo[rev]
1191 1198
1192 1199 # Get Differential Revision ID
1193 1200 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1194 1201 if oldnode != ctx.node() or opts.get(b'amend'):
1195 1202 # Create or update Differential Revision
1196 1203 revision, diff = createdifferentialrevision(
1197 1204 ctx,
1198 1205 revid,
1199 1206 lastrevphid,
1200 1207 oldnode,
1201 1208 olddiff,
1202 1209 actions,
1203 1210 opts.get(b'comment'),
1204 1211 )
1205 1212 diffmap[ctx.node()] = diff
1206 1213 newrevid = int(revision[b'object'][b'id'])
1207 1214 newrevphid = revision[b'object'][b'phid']
1208 1215 if revid:
1209 1216 action = b'updated'
1210 1217 else:
1211 1218 action = b'created'
1212 1219
1213 1220 # Create a local tag to note the association, if commit message
1214 1221 # does not have it already
1215 1222 m = _differentialrevisiondescre.search(ctx.description())
1216 1223 if not m or int(m.group('id')) != newrevid:
1217 1224 tagname = b'D%d' % newrevid
1218 1225 tags.tag(
1219 1226 repo,
1220 1227 tagname,
1221 1228 ctx.node(),
1222 1229 message=None,
1223 1230 user=None,
1224 1231 date=None,
1225 1232 local=True,
1226 1233 )
1227 1234 else:
1228 1235 # Nothing changed. But still set "newrevphid" so the next revision
1229 1236 # could depend on this one and "newrevid" for the summary line.
1230 1237 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1231 1238 newrevid = revid
1232 1239 action = b'skipped'
1233 1240
1234 1241 actiondesc = ui.label(
1235 1242 {
1236 1243 b'created': _(b'created'),
1237 1244 b'skipped': _(b'skipped'),
1238 1245 b'updated': _(b'updated'),
1239 1246 }[action],
1240 1247 b'phabricator.action.%s' % action,
1241 1248 )
1242 1249 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1243 1250 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1244 1251 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1245 1252 ui.write(
1246 1253 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1247 1254 )
1248 1255 drevids.append(newrevid)
1249 1256 lastrevphid = newrevphid
1250 1257
1251 1258 # Update commit messages and remove tags
1252 1259 if opts.get(b'amend'):
1253 1260 unfi = repo.unfiltered()
1254 1261 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1255 1262 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1256 1263 wnode = unfi[b'.'].node()
1257 1264 mapping = {} # {oldnode: [newnode]}
1258 1265 for i, rev in enumerate(revs):
1259 1266 old = unfi[rev]
1260 1267 drevid = drevids[i]
1261 1268 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1262 1269 newdesc = getdescfromdrev(drev)
1263 1270 # Make sure commit message contain "Differential Revision"
1264 1271 if old.description() != newdesc:
1265 1272 if old.phase() == phases.public:
1266 1273 ui.warn(
1267 1274 _(b"warning: not updating public commit %s\n")
1268 1275 % scmutil.formatchangeid(old)
1269 1276 )
1270 1277 continue
1271 1278 parents = [
1272 1279 mapping.get(old.p1().node(), (old.p1(),))[0],
1273 1280 mapping.get(old.p2().node(), (old.p2(),))[0],
1274 1281 ]
1275 1282 new = context.metadataonlyctx(
1276 1283 repo,
1277 1284 old,
1278 1285 parents=parents,
1279 1286 text=newdesc,
1280 1287 user=old.user(),
1281 1288 date=old.date(),
1282 1289 extra=old.extra(),
1283 1290 )
1284 1291
1285 1292 newnode = new.commit()
1286 1293
1287 1294 mapping[old.node()] = [newnode]
1288 1295 # Update diff property
1289 1296 # If it fails just warn and keep going, otherwise the DREV
1290 1297 # associations will be lost
1291 1298 try:
1292 writediffproperties(unfi[newnode], diffmap[old.node()])
1299 writediffproperties(
1300 [unfi[newnode]], diffmap[old.node()]
1301 )
1293 1302 except util.urlerr.urlerror:
1294 1303 ui.warnnoi18n(
1295 1304 b'Failed to update metadata for D%d\n' % drevid
1296 1305 )
1297 1306 # Remove local tags since it's no longer necessary
1298 1307 tagname = b'D%d' % drevid
1299 1308 if tagname in repo.tags():
1300 1309 tags.tag(
1301 1310 repo,
1302 1311 tagname,
1303 1312 nullid,
1304 1313 message=None,
1305 1314 user=None,
1306 1315 date=None,
1307 1316 local=True,
1308 1317 )
1309 1318 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1310 1319 if wnode in mapping:
1311 1320 unfi.setparents(mapping[wnode][0])
1312 1321
1313 1322
1314 1323 # Map from "hg:meta" keys to header understood by "hg import". The order is
1315 1324 # consistent with "hg export" output.
1316 1325 _metanamemap = util.sortdict(
1317 1326 [
1318 1327 (b'user', b'User'),
1319 1328 (b'date', b'Date'),
1320 1329 (b'branch', b'Branch'),
1321 1330 (b'node', b'Node ID'),
1322 1331 (b'parent', b'Parent '),
1323 1332 ]
1324 1333 )
1325 1334
1326 1335
1327 1336 def _confirmbeforesend(repo, revs, oldmap):
1328 1337 url, token = readurltoken(repo.ui)
1329 1338 ui = repo.ui
1330 1339 for rev in revs:
1331 1340 ctx = repo[rev]
1332 1341 desc = ctx.description().splitlines()[0]
1333 1342 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1334 1343 if drevid:
1335 1344 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1336 1345 else:
1337 1346 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1338 1347
1339 1348 ui.write(
1340 1349 _(b'%s - %s: %s\n')
1341 1350 % (
1342 1351 drevdesc,
1343 1352 ui.label(bytes(ctx), b'phabricator.node'),
1344 1353 ui.label(desc, b'phabricator.desc'),
1345 1354 )
1346 1355 )
1347 1356
1348 1357 if ui.promptchoice(
1349 1358 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1350 1359 ):
1351 1360 return False
1352 1361
1353 1362 return True
1354 1363
1355 1364
1356 1365 _knownstatusnames = {
1357 1366 b'accepted',
1358 1367 b'needsreview',
1359 1368 b'needsrevision',
1360 1369 b'closed',
1361 1370 b'abandoned',
1362 1371 b'changesplanned',
1363 1372 }
1364 1373
1365 1374
1366 1375 def _getstatusname(drev):
1367 1376 """get normalized status name from a Differential Revision"""
1368 1377 return drev[b'statusName'].replace(b' ', b'').lower()
1369 1378
1370 1379
1371 1380 # Small language to specify differential revisions. Support symbols: (), :X,
1372 1381 # +, and -.
1373 1382
1374 1383 _elements = {
1375 1384 # token-type: binding-strength, primary, prefix, infix, suffix
1376 1385 b'(': (12, None, (b'group', 1, b')'), None, None),
1377 1386 b':': (8, None, (b'ancestors', 8), None, None),
1378 1387 b'&': (5, None, None, (b'and_', 5), None),
1379 1388 b'+': (4, None, None, (b'add', 4), None),
1380 1389 b'-': (4, None, None, (b'sub', 4), None),
1381 1390 b')': (0, None, None, None, None),
1382 1391 b'symbol': (0, b'symbol', None, None, None),
1383 1392 b'end': (0, None, None, None, None),
1384 1393 }
1385 1394
1386 1395
1387 1396 def _tokenize(text):
1388 1397 view = memoryview(text) # zero-copy slice
1389 1398 special = b'():+-& '
1390 1399 pos = 0
1391 1400 length = len(text)
1392 1401 while pos < length:
1393 1402 symbol = b''.join(
1394 1403 itertools.takewhile(
1395 1404 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1396 1405 )
1397 1406 )
1398 1407 if symbol:
1399 1408 yield (b'symbol', symbol, pos)
1400 1409 pos += len(symbol)
1401 1410 else: # special char, ignore space
1402 1411 if text[pos : pos + 1] != b' ':
1403 1412 yield (text[pos : pos + 1], None, pos)
1404 1413 pos += 1
1405 1414 yield (b'end', None, pos)
1406 1415
1407 1416
1408 1417 def _parse(text):
1409 1418 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1410 1419 if pos != len(text):
1411 1420 raise error.ParseError(b'invalid token', pos)
1412 1421 return tree
1413 1422
1414 1423
1415 1424 def _parsedrev(symbol):
1416 1425 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1417 1426 if symbol.startswith(b'D') and symbol[1:].isdigit():
1418 1427 return int(symbol[1:])
1419 1428 if symbol.isdigit():
1420 1429 return int(symbol)
1421 1430
1422 1431
1423 1432 def _prefetchdrevs(tree):
1424 1433 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1425 1434 drevs = set()
1426 1435 ancestordrevs = set()
1427 1436 op = tree[0]
1428 1437 if op == b'symbol':
1429 1438 r = _parsedrev(tree[1])
1430 1439 if r:
1431 1440 drevs.add(r)
1432 1441 elif op == b'ancestors':
1433 1442 r, a = _prefetchdrevs(tree[1])
1434 1443 drevs.update(r)
1435 1444 ancestordrevs.update(r)
1436 1445 ancestordrevs.update(a)
1437 1446 else:
1438 1447 for t in tree[1:]:
1439 1448 r, a = _prefetchdrevs(t)
1440 1449 drevs.update(r)
1441 1450 ancestordrevs.update(a)
1442 1451 return drevs, ancestordrevs
1443 1452
1444 1453
1445 1454 def querydrev(ui, spec):
1446 1455 """return a list of "Differential Revision" dicts
1447 1456
1448 1457 spec is a string using a simple query language, see docstring in phabread
1449 1458 for details.
1450 1459
1451 1460 A "Differential Revision dict" looks like:
1452 1461
1453 1462 {
1454 1463 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1455 1464 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1456 1465 "auxiliary": {
1457 1466 "phabricator:depends-on": [
1458 1467 "PHID-DREV-gbapp366kutjebt7agcd"
1459 1468 ]
1460 1469 "phabricator:projects": [],
1461 1470 },
1462 1471 "branch": "default",
1463 1472 "ccs": [],
1464 1473 "commits": [],
1465 1474 "dateCreated": "1499181406",
1466 1475 "dateModified": "1499182103",
1467 1476 "diffs": [
1468 1477 "3",
1469 1478 "4",
1470 1479 ],
1471 1480 "hashes": [],
1472 1481 "id": "2",
1473 1482 "lineCount": "2",
1474 1483 "phid": "PHID-DREV-672qvysjcczopag46qty",
1475 1484 "properties": {},
1476 1485 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1477 1486 "reviewers": [],
1478 1487 "sourcePath": null
1479 1488 "status": "0",
1480 1489 "statusName": "Needs Review",
1481 1490 "summary": "",
1482 1491 "testPlan": "",
1483 1492 "title": "example",
1484 1493 "uri": "https://phab.example.com/D2",
1485 1494 }
1486 1495 """
1487 1496 # TODO: replace differential.query and differential.querydiffs with
1488 1497 # differential.diff.search because the former (and their output) are
1489 1498 # frozen, and planned to be deprecated and removed.
1490 1499
1491 1500 def fetch(params):
1492 1501 """params -> single drev or None"""
1493 1502 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1494 1503 if key in prefetched:
1495 1504 return prefetched[key]
1496 1505 drevs = callconduit(ui, b'differential.query', params)
1497 1506 # Fill prefetched with the result
1498 1507 for drev in drevs:
1499 1508 prefetched[drev[b'phid']] = drev
1500 1509 prefetched[int(drev[b'id'])] = drev
1501 1510 if key not in prefetched:
1502 1511 raise error.Abort(
1503 1512 _(b'cannot get Differential Revision %r') % params
1504 1513 )
1505 1514 return prefetched[key]
1506 1515
1507 1516 def getstack(topdrevids):
1508 1517 """given a top, get a stack from the bottom, [id] -> [id]"""
1509 1518 visited = set()
1510 1519 result = []
1511 1520 queue = [{b'ids': [i]} for i in topdrevids]
1512 1521 while queue:
1513 1522 params = queue.pop()
1514 1523 drev = fetch(params)
1515 1524 if drev[b'id'] in visited:
1516 1525 continue
1517 1526 visited.add(drev[b'id'])
1518 1527 result.append(int(drev[b'id']))
1519 1528 auxiliary = drev.get(b'auxiliary', {})
1520 1529 depends = auxiliary.get(b'phabricator:depends-on', [])
1521 1530 for phid in depends:
1522 1531 queue.append({b'phids': [phid]})
1523 1532 result.reverse()
1524 1533 return smartset.baseset(result)
1525 1534
1526 1535 # Initialize prefetch cache
1527 1536 prefetched = {} # {id or phid: drev}
1528 1537
1529 1538 tree = _parse(spec)
1530 1539 drevs, ancestordrevs = _prefetchdrevs(tree)
1531 1540
1532 1541 # developer config: phabricator.batchsize
1533 1542 batchsize = ui.configint(b'phabricator', b'batchsize')
1534 1543
1535 1544 # Prefetch Differential Revisions in batch
1536 1545 tofetch = set(drevs)
1537 1546 for r in ancestordrevs:
1538 1547 tofetch.update(range(max(1, r - batchsize), r + 1))
1539 1548 if drevs:
1540 1549 fetch({b'ids': list(tofetch)})
1541 1550 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1542 1551
1543 1552 # Walk through the tree, return smartsets
1544 1553 def walk(tree):
1545 1554 op = tree[0]
1546 1555 if op == b'symbol':
1547 1556 drev = _parsedrev(tree[1])
1548 1557 if drev:
1549 1558 return smartset.baseset([drev])
1550 1559 elif tree[1] in _knownstatusnames:
1551 1560 drevs = [
1552 1561 r
1553 1562 for r in validids
1554 1563 if _getstatusname(prefetched[r]) == tree[1]
1555 1564 ]
1556 1565 return smartset.baseset(drevs)
1557 1566 else:
1558 1567 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1559 1568 elif op in {b'and_', b'add', b'sub'}:
1560 1569 assert len(tree) == 3
1561 1570 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1562 1571 elif op == b'group':
1563 1572 return walk(tree[1])
1564 1573 elif op == b'ancestors':
1565 1574 return getstack(walk(tree[1]))
1566 1575 else:
1567 1576 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1568 1577
1569 1578 return [prefetched[r] for r in walk(tree)]
1570 1579
1571 1580
1572 1581 def getdescfromdrev(drev):
1573 1582 """get description (commit message) from "Differential Revision"
1574 1583
1575 1584 This is similar to differential.getcommitmessage API. But we only care
1576 1585 about limited fields: title, summary, test plan, and URL.
1577 1586 """
1578 1587 title = drev[b'title']
1579 1588 summary = drev[b'summary'].rstrip()
1580 1589 testplan = drev[b'testPlan'].rstrip()
1581 1590 if testplan:
1582 1591 testplan = b'Test Plan:\n%s' % testplan
1583 1592 uri = b'Differential Revision: %s' % drev[b'uri']
1584 1593 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1585 1594
1586 1595
1587 1596 def getdiffmeta(diff):
1588 1597 """get commit metadata (date, node, user, p1) from a diff object
1589 1598
1590 1599 The metadata could be "hg:meta", sent by phabsend, like:
1591 1600
1592 1601 "properties": {
1593 1602 "hg:meta": {
1594 1603 "branch": "default",
1595 1604 "date": "1499571514 25200",
1596 1605 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1597 1606 "user": "Foo Bar <foo@example.com>",
1598 1607 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1599 1608 }
1600 1609 }
1601 1610
1602 1611 Or converted from "local:commits", sent by "arc", like:
1603 1612
1604 1613 "properties": {
1605 1614 "local:commits": {
1606 1615 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1607 1616 "author": "Foo Bar",
1608 1617 "authorEmail": "foo@example.com"
1609 1618 "branch": "default",
1610 1619 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1611 1620 "local": "1000",
1612 1621 "message": "...",
1613 1622 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1614 1623 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1615 1624 "summary": "...",
1616 1625 "tag": "",
1617 1626 "time": 1499546314,
1618 1627 }
1619 1628 }
1620 1629 }
1621 1630
1622 1631 Note: metadata extracted from "local:commits" will lose time zone
1623 1632 information.
1624 1633 """
1625 1634 props = diff.get(b'properties') or {}
1626 1635 meta = props.get(b'hg:meta')
1627 1636 if not meta:
1628 1637 if props.get(b'local:commits'):
1629 1638 commit = sorted(props[b'local:commits'].values())[0]
1630 1639 meta = {}
1631 1640 if b'author' in commit and b'authorEmail' in commit:
1632 1641 meta[b'user'] = b'%s <%s>' % (
1633 1642 commit[b'author'],
1634 1643 commit[b'authorEmail'],
1635 1644 )
1636 1645 if b'time' in commit:
1637 1646 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1638 1647 if b'branch' in commit:
1639 1648 meta[b'branch'] = commit[b'branch']
1640 1649 node = commit.get(b'commit', commit.get(b'rev'))
1641 1650 if node:
1642 1651 meta[b'node'] = node
1643 1652 if len(commit.get(b'parents', ())) >= 1:
1644 1653 meta[b'parent'] = commit[b'parents'][0]
1645 1654 else:
1646 1655 meta = {}
1647 1656 if b'date' not in meta and b'dateCreated' in diff:
1648 1657 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1649 1658 if b'branch' not in meta and diff.get(b'branch'):
1650 1659 meta[b'branch'] = diff[b'branch']
1651 1660 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1652 1661 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1653 1662 return meta
1654 1663
1655 1664
1656 1665 def _getdrevs(ui, stack, specs):
1657 1666 """convert user supplied DREVSPECs into "Differential Revision" dicts
1658 1667
1659 1668 See ``hg help phabread`` for how to specify each DREVSPEC.
1660 1669 """
1661 1670 if len(specs) > 0:
1662 1671
1663 1672 def _formatspec(s):
1664 1673 if stack:
1665 1674 s = b':(%s)' % s
1666 1675 return b'(%s)' % s
1667 1676
1668 1677 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1669 1678
1670 1679 drevs = querydrev(ui, spec)
1671 1680 if drevs:
1672 1681 return drevs
1673 1682
1674 1683 raise error.Abort(_(b"empty DREVSPEC set"))
1675 1684
1676 1685
1677 1686 def readpatch(ui, drevs, write):
1678 1687 """generate plain-text patch readable by 'hg import'
1679 1688
1680 1689 write takes a list of (DREV, bytes), where DREV is the differential number
1681 1690 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1682 1691 to be imported. drevs is what "querydrev" returns, results of
1683 1692 "differential.query".
1684 1693 """
1685 1694 # Prefetch hg:meta property for all diffs
1686 1695 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1687 1696 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1688 1697
1689 1698 patches = []
1690 1699
1691 1700 # Generate patch for each drev
1692 1701 for drev in drevs:
1693 1702 ui.note(_(b'reading D%s\n') % drev[b'id'])
1694 1703
1695 1704 diffid = max(int(v) for v in drev[b'diffs'])
1696 1705 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1697 1706 desc = getdescfromdrev(drev)
1698 1707 header = b'# HG changeset patch\n'
1699 1708
1700 1709 # Try to preserve metadata from hg:meta property. Write hg patch
1701 1710 # headers that can be read by the "import" command. See patchheadermap
1702 1711 # and extract in mercurial/patch.py for supported headers.
1703 1712 meta = getdiffmeta(diffs[b'%d' % diffid])
1704 1713 for k in _metanamemap.keys():
1705 1714 if k in meta:
1706 1715 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1707 1716
1708 1717 content = b'%s%s\n%s' % (header, desc, body)
1709 1718 patches.append((drev[b'id'], content))
1710 1719
1711 1720 # Write patches to the supplied callback
1712 1721 write(patches)
1713 1722
1714 1723
1715 1724 @vcrcommand(
1716 1725 b'phabread',
1717 1726 [(b'', b'stack', False, _(b'read dependencies'))],
1718 1727 _(b'DREVSPEC... [OPTIONS]'),
1719 1728 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1720 1729 optionalrepo=True,
1721 1730 )
1722 1731 def phabread(ui, repo, *specs, **opts):
1723 1732 """print patches from Phabricator suitable for importing
1724 1733
1725 1734 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1726 1735 the number ``123``. It could also have common operators like ``+``, ``-``,
1727 1736 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1728 1737 select a stack. If multiple DREVSPEC values are given, the result is the
1729 1738 union of each individually evaluated value. No attempt is currently made
1730 1739 to reorder the values to run from parent to child.
1731 1740
1732 1741 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1733 1742 could be used to filter patches by status. For performance reason, they
1734 1743 only represent a subset of non-status selections and cannot be used alone.
1735 1744
1736 1745 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1737 1746 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1738 1747 stack up to D9.
1739 1748
1740 1749 If --stack is given, follow dependencies information and read all patches.
1741 1750 It is equivalent to the ``:`` operator.
1742 1751 """
1743 1752 opts = pycompat.byteskwargs(opts)
1744 1753 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1745 1754
1746 1755 def _write(patches):
1747 1756 for drev, content in patches:
1748 1757 ui.write(content)
1749 1758
1750 1759 readpatch(ui, drevs, _write)
1751 1760
1752 1761
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass keeps exec/symlink bits intact when importing on Windows and
    # permits importing with a dirty wdir; it also aborts instead of leaving
    # reject files behind.  strip/prefix are mandatory defaults kept in sync
    # with commands.import, and evolve 9.3.0 requires the b'obsolete' key to
    # exist when it wraps cmdutil.tryimportone().
    opts.update(
        {
            b'bypass': True,
            b'strip': 1,
            b'prefix': b'',
            b'obsolete': False,
        }
    )

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        # evolve's wrapper around tryimportone() acts on this flag.
        opts[b'obsolete'] = True

    def _apply(patches):
        """Apply each (drev, patch content) pair on top of the previous one."""
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                # Chain the next patch onto the commit we just created.
                parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _apply)
1812 1821
1813 1822
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, *specs, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)

    # The status flags are mutually exclusive: at most one may be given.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # A comment, if given, is attached to the last revision only.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1851 1860
1852 1861
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')

    # Prefer the "Differential Revision: <url>" line in the description.
    found = _differentialrevisiondescre.search(ctx.description())
    if found:
        return templateutil.hybriddict(
            {b'url': found.group('url'), b'id': b"D%s" % found.group('id'),}
        )

    # Otherwise fall back to a local D<num> tag, rebuilding the URL from the
    # configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(tag):
            base = ctx.repo().ui.config(b'phabricator', b'url')
            if not base.endswith(b'/'):
                base += b'/'
            return templateutil.hybriddict({b'url': base + tag, b'id': tag,})
    return None
1875 1884
1876 1885
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    # getdrevmap() includes every requested rev in its result, mapping
    # revisions without an associated Differential Revision to None (see its
    # use in phabstatusshowview), so the KeyError above never fires for that
    # case.  Without this check, the conduit query below would be issued with
    # ids=[None], which is an invalid request.
    if drevid is None:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
            )
    return None
1897 1906
1898 1907
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition the revisions: those with a known Differential Revision are
    # grouped by drev id; the rest are dropped from the graph below.
    norev = []
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            norev.append(rev)
        else:
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # One batched conduit query for all of the ids at once.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(revsbydrevid)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Hook invoked per changeset by the graph displayer.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(norev)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now