##// END OF EJS Templates
phabricator: color the status in the "phabstatus" view...
Matt Harbison -
r44310:b0867b77 default
parent child Browse files
Show More
@@ -1,1746 +1,1756 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 19 changeset from being sent. The requirement could be disabled by changing
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that is not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 logcmdutil,
70 70 match,
71 71 mdiff,
72 72 obsutil,
73 73 parser,
74 74 patch,
75 75 phases,
76 76 pycompat,
77 77 scmutil,
78 78 smartset,
79 79 tags,
80 80 templatefilters,
81 81 templateutil,
82 82 url as urlmod,
83 83 util,
84 84 )
85 85 from mercurial.utils import (
86 86 procutil,
87 87 stringutil,
88 88 )
89 89 from . import show
90 90
91 91
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Extension registration hooks re-exported from the helper so Mercurial's
# extension loader can find them under their conventional names.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect styles for the output labels this extension emits (picked up
# by the color extension).  The phabricator.status.* entries colorize the
# Differential Revision status column of the "phabstatus" view.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Hidden flag appended to every command registered via vcrcommand(); the
# test suite uses it to record/replay Conduit HTTP traffic.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
147 153
148 154
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``@command``, adding a hidden ``--test-vcr``
    flag that records or replays Conduit HTTP traffic via the ``vcr``
    package, for use by the test suite.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: compare URI, method and form
        # parameters; JSON-valued parameters are compared structurally so
        # key ordering differences do not cause replay mismatches.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the API token from recorded cassettes.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop session cookies from recorded cassettes.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # --test-vcr is consumed here and never reaches the wrapped
            # command.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does not play well with demandimport's lazy modules.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own urlmod connection classes,
                        # not the stdlib ones vcr patches by default.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
225 231
226 232
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        if isinstance(value, bool):
            # Python -> PHP form
            value = b'true' if value else b'false'
        # Exact-type dispatch on purpose: subclasses (e.g. sortdict) are
        # treated as leaf values, matching the PHP flattening behavior.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            pairs = list(value.items())
        else:
            flatparams[prefix] = value
            return
        for subkey, subvalue in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, subkey), subvalue)
            else:
                flatten(subkey, subvalue)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
252 258
253 259
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises Abort if either the URL or the token is not configured.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Find the [auth] group whose scheme/prefix match the configured URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
282 288
283 289
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method name (e.g. ``differential.querydiffs``).
    Raises Abort when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # The API token travels inside the parameters themselves.
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the user-configured curl command, feeding the
        # form-encoded body on stdin (`-d @-`).
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP path: honors [auth] configuration via urlmod.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Recursively convert unicode strings in the decoded JSON to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
327 333
328 334
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable, human-readable output for test expectations.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
352 358
353 359
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    constraints = {b'constraints': {b'callsigns': [callsign]}}
    query = callconduit(ui, b'diffusion.repository.search', constraints)
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # Cache the answer in the config for the rest of this process.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
373 379
374 380
# Matches local tag names of the form "D123" that phabsend leaves behind.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>" trailer in commit messages;
# captures the full URL and the trailing numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
379 385
380 386
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag still needs confirmation below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: trust the commit message association as-is
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the node recorded in a diff's hg:meta, or None if absent.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                # Remove the stale local "D<n>" tag by re-tagging nullid.
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
474 480
475 481
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    mapping = {}
    for rev in revs:
        ctx = repo[rev]
        drev = None
        # Prefer the "Differential Revision:" trailer in the commit message.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            drev = int(descmatch.group('id'))
        else:
            # Fall back to local tags shaped like "D123".
            for tag in repo.nodetags(ctx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drev = int(tagmatch.group(1))
                    break
        mapping[rev] = drev
    return mapping
497 503
498 504
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw diff chunks, discarding the ui color labels.
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    return b''.join(chunk for chunk, _label in chunks)
507 513
508 514
class DiffChangeType(object):
    """Numeric per-file change-type codes sent to Phabricator in
    ``phabchange.type`` (add/modify/delete and the copy/move variants).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
518 524
519 525
class DiffFileType(object):
    """Numeric file-type codes sent to Phabricator in
    ``phabchange.fileType``.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
524 530
525 531
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Field names use Phabricator's camelCase wire format; attr.asdict() turns
    an instance directly into the hunk payload.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
539 545
540 546
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Field names use Phabricator's camelCase wire format.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Duplicate every "new:*" metadata entry under an "old:*" key.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the pre-change unix file mode (git-style string).
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record the post-change unix file mode (git-style string).
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
580 586
581 587
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Field names use Phabricator's camelCase wire format; the instance is
    serialized with attr.asdict() as the differential.creatediff payload.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        # Keyed by path; a later change for the same path replaces it.
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
608 614
609 615
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Computes the hunks for ``fname`` between ctx.p1() and ctx and appends
    them (with per-hunk add/del line counts) to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so each file yields a single all-encompassing hunk set.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the "@@ ... @@" line; Phabricator wants only the body.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Count added/removed lines via diffstat over the synthetic hunk.
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
638 644
639 645
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by file.allocate.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server already has (resumable upload).
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
665 671
666 672
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the (possibly already existing) file object on the
    server; raises Abort if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID yet: small enough for a one-shot file.upload call.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID plus upload=True: the server expects chunked uploads.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
702 708
703 709
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``originalfname`` is the path of the file in fctx.p1() (may differ from
    fctx.path() for renames/copies).
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
725 731
726 732
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file

    Uploads the file content and records its PHID, size and guessed MIME
    type; image/* files are flagged IMAGE so the web UI renders them.
    """
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
739 745
740 746
# Copied from mercurial/patch.py
# Map Mercurial file flags ('l' symlink, 'x' executable, '' regular) to
# git-style mode strings.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
743 749
744 750
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Both the current and (if any) parent revision must decode cleanly,
        # since the diff corpus includes text from both sides.
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
760 766
761 767
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    For each removed path, record the old file mode and (for text files)
    the deletion hunks.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        fctx = ctx.p1()[fname]
        # Binary/non-UTF-8 removals carry no text hunks.
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
774 780
775 781
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff

    Records a mode change when the flags differ between parent and child,
    then attaches either binary metadata or text hunks.
    """
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
794 800
795 801
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed`` in place: the source of a detected move is taken
    out of it so addremoved() does not report it again.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Second+ destination for an already-moved source.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The *_AWAY counterpart changes are emitted after all destinations are
    # known, since MULTICOPY/awayPaths accumulate across the loop.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
859 865
860 866
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff from the changes between ctx.p1() and ctx and sends
    it via differential.creatediff; returns the server's diff object.
    Raises Abort if the call returns nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
886 892
887 893
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly

    Attaches two properties to the server-side diff: ``hg:meta`` (commit
    identity needed to reconstruct the changeset) and ``local:commits``
    (per-commit details shown in the Phabricator UI).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
924 930
925 931
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair: the dict returned by
    ``differential.revision.edit`` and the diff attached to it (either a
    freshly created one or ``olddiff``). Raises error.Abort when the
    Conduit call returns nothing.
    """
    repo = ctx.repo()
    if oldnode:
        # Huge context so hunks are not trimmed; presumably makes the
        # comparison depend on full file content — TODO confirm.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        # Only upload a new diff if the patch body actually changed.
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
995 1001
996 1002
def userphids(repo, names):
    """convert user names to PHIDs

    Raises error.Abort listing any names the server did not resolve, since
    user.search silently ignores unknown usernames.
    """
    lowered = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1012 1018
1013 1019
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line, or [phabsend] confirm in hgrc, triggers
    # an interactive preview before anything is uploaded.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker PHIDs become one "reviewers.add" transaction shared by
    # every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per changeset: "D123 - created - 1:abc: desc".
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Reuse already-rewritten parents so the whole stack is
                    # rebuilt consistently.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1221 1227
1222 1228
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output. Consumed by readpatch() when emitting
# "# <Header> <value>" lines.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1234 1240
1235 1241
def _confirmbeforesend(repo, revs, oldmap):
    """show the to-be-sent changesets and prompt; return True to proceed

    Prints one "D123 - 1:abc: desc" (or "NEW - ...") line per revision,
    then asks a yes/no question naming the Phabricator URL.
    """
    # readurltoken also validates that the URL/token are configured.
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        summary = ctx.description().splitlines()[0]
        descdesc = ui.label(summary, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns the choice index: 0 for Yes, 1 for No.
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1263 1269
1264 1270
# Normalized status names (see _getstatusname) that the DREVSPEC query
# language accepts as bare symbols, e.g. ":D9 & needsreview" in querydrev.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1273 1279
1274 1280
1275 1281 def _getstatusname(drev):
1276 1282 """get normalized status name from a Differential Revision"""
1277 1283 return drev[b'statusName'].replace(b' ', b'').lower()
1278 1284
1279 1285
# Small language to specify differential revisions. Support symbols: (), :X,
# +, -, and &.
1282 1288
# Grammar table consumed by mercurial's generic precedence parser in _parse
# (parser.parser). Each entry describes how a token may be used.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1294 1300
1295 1301
1296 1302 def _tokenize(text):
1297 1303 view = memoryview(text) # zero-copy slice
1298 1304 special = b'():+-& '
1299 1305 pos = 0
1300 1306 length = len(text)
1301 1307 while pos < length:
1302 1308 symbol = b''.join(
1303 1309 itertools.takewhile(
1304 1310 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1305 1311 )
1306 1312 )
1307 1313 if symbol:
1308 1314 yield (b'symbol', symbol, pos)
1309 1315 pos += len(symbol)
1310 1316 else: # special char, ignore space
1311 1317 if text[pos : pos + 1] != b' ':
1312 1318 yield (text[pos : pos + 1], None, pos)
1313 1319 pos += 1
1314 1320 yield (b'end', None, pos)
1315 1321
1316 1322
def _parse(text):
    """parse a DREVSPEC bytestring into a tree using the _elements grammar

    Raises error.ParseError if the whole input was not consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1322 1328
1323 1329
1324 1330 def _parsedrev(symbol):
1325 1331 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1326 1332 if symbol.startswith(b'D') and symbol[1:].isdigit():
1327 1333 return int(symbol[1:])
1328 1334 if symbol.isdigit():
1329 1335 return int(symbol)
1330 1336
1331 1337
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # Ids under ":" are both plain ids and ancestor roots.
        childdrevs, childancestors = _prefetchdrevs(tree[1])
        drevs |= childdrevs
        ancestordrevs |= childdrevs
        ancestordrevs |= childancestors
    else:
        # Any other operator: union the results of all operands.
        for subtree in tree[1:]:
            childdrevs, childancestors = _prefetchdrevs(subtree)
            drevs |= childdrevs
            ancestordrevs |= childancestors
    return drevs, ancestordrevs
1352 1358
1353 1359
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        # Keyed by whichever identifier the caller used: id or phid.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Walk "phabricator:depends-on" links transitively, depth-first,
        # then reverse so the result runs bottom (oldest) to top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Guess that a stack's ancestors lie within batchsize ids below r.
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the already-selected valid ids only.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1476 1482
1477 1483
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL. Empty sections
    are omitted; the rest are joined with blank lines.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(p for p in parts if p)
1491 1497
1492 1498
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        if props.get(b'local:commits'):
            # Only one entry is expected; sorted() picks it deterministically.
            commit = sorted(props[b'local:commits'].values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # No time zone info available here, so assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]

    # Fill anything still missing from diff-level fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    for metakey, diffkey in [
        (b'branch', b'branch'),
        (b'parent', b'sourceControlBaseRevision'),
    ]:
        if metakey not in meta and diff.get(diffkey):
            meta[metakey] = diff[diffkey]
    return meta
1559 1565
1560 1566
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query". For each drev the latest diff is fetched and
    emitted with "# HG changeset patch" headers built from its metadata.
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See
        # patchheadermap and extract in mercurial/patch.py for supported
        # headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for metakey, headername in _metanamemap.items():
            if metakey in meta:
                headerlines.append(b'# %s %s' % (headername, meta[metakey]))
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1592 1598
1593 1599
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ":" (ancestors) operator.
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1624 1630
1625 1631
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # At most one status flag may be given; it applies to every selected drev.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': True} for flag in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for index, drev in enumerate(drevs):
        # --comment is attached only to the last revision in the selection.
        if index == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1662 1668
1663 1669
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line phabsend --amend writes
    # into the commit message.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # Fall back to the local "D123" tags phabsend leaves behind. Named
        # "nodetags" (not "tags") to avoid shadowing the mercurial tags
        # module used elsewhere in this file.
        nodetags = ctx.repo().nodetags(ctx.node())
        for t in nodetags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
1686 1692
1687 1693
@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential.
    """
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None

    # Look the revision up on the server and pick the matching entry.
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    matches = [d for d in drevs if int(d[b'id']) == drevid]
    if not matches:
        return None
    drev = matches[0]
    return templateutil.hybriddict(
        {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
    )
1708 1714
1709 1715
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    # Split into revs with a known D-number and the rest; remember which
    # revs map to each drev id so results can be joined back per-rev.
    unknownrevs, drevids, revsbydrevid = [], set([]), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set([])).add(rev)
        else:
            unknownrevs.append(rev)

    # One batched query for all drevs, then index the results by local rev.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Extra hook invoked by the displayer for each changeset: print the
        # drev URI plus its status name, colored via the
        # "phabricator.status.<name>" label.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Only graph the revs we could associate with a Differential Revision.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now