phabricator: add a "phabstatus" template keyword...
Denis Laxalde
r44292:79c01212 default
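
The new ``phabstatus`` template keyword exposes the status of the Differential Revision associated with a changeset, and the matching ``phabstatus`` view registered for :hg:`show` annotates the graph of unfinished changesets with the same information. A rough usage sketch, assuming ``[phabricator]`` ``url`` and ``callsign`` are configured as in the module docstring below and that the changesets carry a ``Differential Revision`` association::

  $ hg show phabstatus
  $ hg log -r . -T '{phabstatus.status} {phabstatus.url}\n'

The keyword resolves to an object with ``status`` and ``url`` attributes, and yields nothing for changesets without an associated Differential Revision.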
@@ -1,1723 +1,1745 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use the builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This can be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 logcmdutil,
70 70 match,
71 71 mdiff,
72 72 obsutil,
73 73 parser,
74 74 patch,
75 75 phases,
76 76 pycompat,
77 77 scmutil,
78 78 smartset,
79 79 tags,
80 80 templatefilters,
81 81 templateutil,
82 82 url as urlmod,
83 83 util,
84 84 )
85 85 from mercurial.utils import (
86 86 procutil,
87 87 stringutil,
88 88 )
89 89 from . import show
90 90
91 91
92 92 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 93 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 94 # be specifying the version(s) of Mercurial they are tested with, or
95 95 # leave the attribute unspecified.
96 96 testedwith = b'ships-with-hg-core'
97 97
98 98 eh = exthelper.exthelper()
99 99
100 100 cmdtable = eh.cmdtable
101 101 command = eh.command
102 102 configtable = eh.configtable
103 103 templatekeyword = eh.templatekeyword
104 104
105 105 # developer config: phabricator.batchsize
106 106 eh.configitem(
107 107 b'phabricator', b'batchsize', default=12,
108 108 )
109 109 eh.configitem(
110 110 b'phabricator', b'callsign', default=None,
111 111 )
112 112 eh.configitem(
113 113 b'phabricator', b'curlcmd', default=None,
114 114 )
115 115 # developer config: phabricator.repophid
116 116 eh.configitem(
117 117 b'phabricator', b'repophid', default=None,
118 118 )
119 119 eh.configitem(
120 120 b'phabricator', b'url', default=None,
121 121 )
122 122 eh.configitem(
123 123 b'phabsend', b'confirm', default=False,
124 124 )
125 125
126 126 colortable = {
127 127 b'phabricator.action.created': b'green',
128 128 b'phabricator.action.skipped': b'magenta',
129 129 b'phabricator.action.updated': b'magenta',
130 130 b'phabricator.desc': b'',
131 131 b'phabricator.drev': b'bold',
132 132 b'phabricator.node': b'',
133 133 }
134 134
135 135 _VCR_FLAGS = [
136 136 (
137 137 b'',
138 138 b'test-vcr',
139 139 b'',
140 140 _(
141 141 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
142 142 b', otherwise will mock all http requests using the specified vcr file.'
143 143 b' (ADVANCED)'
144 144 ),
145 145 ),
146 146 ]
147 147
148 148
149 149 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
150 150 fullflags = flags + _VCR_FLAGS
151 151
152 152 def hgmatcher(r1, r2):
153 153 if r1.uri != r2.uri or r1.method != r2.method:
154 154 return False
155 155 r1params = util.urlreq.parseqs(r1.body)
156 156 r2params = util.urlreq.parseqs(r2.body)
157 157 for key in r1params:
158 158 if key not in r2params:
159 159 return False
160 160 value = r1params[key][0]
161 161 # we want to compare json payloads without worrying about ordering
162 162 if value.startswith(b'{') and value.endswith(b'}'):
163 163 r1json = pycompat.json_loads(value)
164 164 r2json = pycompat.json_loads(r2params[key][0])
165 165 if r1json != r2json:
166 166 return False
167 167 elif r2params[key][0] != value:
168 168 return False
169 169 return True
170 170
171 171 def sanitiserequest(request):
172 172 request.body = re.sub(
173 173 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
174 174 )
175 175 return request
176 176
177 177 def sanitiseresponse(response):
178 178 if 'set-cookie' in response['headers']:
179 179 del response['headers']['set-cookie']
180 180 return response
181 181
182 182 def decorate(fn):
183 183 def inner(*args, **kwargs):
184 184 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
185 185 if cassette:
186 186 import hgdemandimport
187 187
188 188 with hgdemandimport.deactivated():
189 189 import vcr as vcrmod
190 190 import vcr.stubs as stubs
191 191
192 192 vcr = vcrmod.VCR(
193 193 serializer='json',
194 194 before_record_request=sanitiserequest,
195 195 before_record_response=sanitiseresponse,
196 196 custom_patches=[
197 197 (
198 198 urlmod,
199 199 'httpconnection',
200 200 stubs.VCRHTTPConnection,
201 201 ),
202 202 (
203 203 urlmod,
204 204 'httpsconnection',
205 205 stubs.VCRHTTPSConnection,
206 206 ),
207 207 ],
208 208 )
209 209 vcr.register_matcher('hgmatcher', hgmatcher)
210 210 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
211 211 return fn(*args, **kwargs)
212 212 return fn(*args, **kwargs)
213 213
214 214 inner.__name__ = fn.__name__
215 215 inner.__doc__ = fn.__doc__
216 216 return command(
217 217 name,
218 218 fullflags,
219 219 spec,
220 220 helpcategory=helpcategory,
221 221 optionalrepo=optionalrepo,
222 222 )(inner)
223 223
224 224 return decorate
225 225
226 226
227 227 def urlencodenested(params):
228 228 """like urlencode, but works with nested parameters.
229 229
230 230 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
231 231 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
232 232 urlencode. Note: the encoding is consistent with PHP's http_build_query.
233 233 """
234 234 flatparams = util.sortdict()
235 235
236 236 def process(prefix, obj):
237 237 if isinstance(obj, bool):
238 238 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
239 239 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
240 240 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
241 241 if items is None:
242 242 flatparams[prefix] = obj
243 243 else:
244 244 for k, v in items(obj):
245 245 if prefix:
246 246 process(b'%s[%s]' % (prefix, k), v)
247 247 else:
248 248 process(k, v)
249 249
250 250 process(b'', params)
251 251 return util.urlreq.urlencode(flatparams)
252 252
253 253
254 254 def readurltoken(ui):
255 255 """return conduit url, token and make sure they exist
256 256
257 257 Currently read from [auth] config section. In the future, it might
258 258 make sense to read from .arcconfig and .arcrc as well.
259 259 """
260 260 url = ui.config(b'phabricator', b'url')
261 261 if not url:
262 262 raise error.Abort(
263 263 _(b'config %s.%s is required') % (b'phabricator', b'url')
264 264 )
265 265
266 266 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
267 267 token = None
268 268
269 269 if res:
270 270 group, auth = res
271 271
272 272 ui.debug(b"using auth.%s.* for authentication\n" % group)
273 273
274 274 token = auth.get(b'phabtoken')
275 275
276 276 if not token:
277 277 raise error.Abort(
278 278 _(b'Can\'t find conduit token associated to %s') % (url,)
279 279 )
280 280
281 281 return url, token
282 282
283 283
284 284 def callconduit(ui, name, params):
285 285 """call Conduit API, params is a dict. return json.loads result, or None"""
286 286 host, token = readurltoken(ui)
287 287 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
288 288 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
289 289 params = params.copy()
290 290 params[b'__conduit__'] = {
291 291 b'token': token,
292 292 }
293 293 rawdata = {
294 294 b'params': templatefilters.json(params),
295 295 b'output': b'json',
296 296 b'__conduit__': 1,
297 297 }
298 298 data = urlencodenested(rawdata)
299 299 curlcmd = ui.config(b'phabricator', b'curlcmd')
300 300 if curlcmd:
301 301 sin, sout = procutil.popen2(
302 302 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
303 303 )
304 304 sin.write(data)
305 305 sin.close()
306 306 body = sout.read()
307 307 else:
308 308 urlopener = urlmod.opener(ui, authinfo)
309 309 request = util.urlreq.request(pycompat.strurl(url), data=data)
310 310 with contextlib.closing(urlopener.open(request)) as rsp:
311 311 body = rsp.read()
312 312 ui.debug(b'Conduit Response: %s\n' % body)
313 313 parsed = pycompat.rapply(
314 314 lambda x: encoding.unitolocal(x)
315 315 if isinstance(x, pycompat.unicode)
316 316 else x,
317 317 # json.loads only accepts bytes from py3.6+
318 318 pycompat.json_loads(encoding.unifromlocal(body)),
319 319 )
320 320 if parsed.get(b'error_code'):
321 321 msg = _(b'Conduit Error (%s): %s') % (
322 322 parsed[b'error_code'],
323 323 parsed[b'error_info'],
324 324 )
325 325 raise error.Abort(msg)
326 326 return parsed[b'result']
327 327
328 328
329 329 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
330 330 def debugcallconduit(ui, repo, name):
331 331 """call Conduit API
332 332
333 333 Call parameters are read from stdin as a JSON blob. Result will be written
334 334 to stdout as a JSON blob.
335 335 """
336 336 # json.loads only accepts bytes from 3.6+
337 337 rawparams = encoding.unifromlocal(ui.fin.read())
338 338 # json.loads only returns unicode strings
339 339 params = pycompat.rapply(
340 340 lambda x: encoding.unitolocal(x)
341 341 if isinstance(x, pycompat.unicode)
342 342 else x,
343 343 pycompat.json_loads(rawparams),
344 344 )
345 345 # json.dumps only accepts unicode strings
346 346 result = pycompat.rapply(
347 347 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
348 348 callconduit(ui, name, params),
349 349 )
350 350 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
351 351 ui.write(b'%s\n' % encoding.unitolocal(s))
352 352
353 353
354 354 def getrepophid(repo):
355 355 """given callsign, return repository PHID or None"""
356 356 # developer config: phabricator.repophid
357 357 repophid = repo.ui.config(b'phabricator', b'repophid')
358 358 if repophid:
359 359 return repophid
360 360 callsign = repo.ui.config(b'phabricator', b'callsign')
361 361 if not callsign:
362 362 return None
363 363 query = callconduit(
364 364 repo.ui,
365 365 b'diffusion.repository.search',
366 366 {b'constraints': {b'callsigns': [callsign]}},
367 367 )
368 368 if len(query[b'data']) == 0:
369 369 return None
370 370 repophid = query[b'data'][0][b'phid']
371 371 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
372 372 return repophid
373 373
374 374
375 375 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
376 376 _differentialrevisiondescre = re.compile(
377 377 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
378 378 )
379 379
380 380
381 381 def getoldnodedrevmap(repo, nodelist):
382 382 """find previous nodes that has been sent to Phabricator
383 383
384 384 return {node: (oldnode, Differential diff, Differential Revision ID)}
385 385 for node in nodelist with known previous sent versions, or associated
386 386 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
387 387 be ``None``.
388 388
389 389 Examines commit messages like "Differential Revision:" to get the
390 390 association information.
391 391
392 392 If no such commit message line is found, examine all precursors and their
393 393 tags. Tags in the form "D1234" are considered a match, and the node with
394 394 that tag and the number after "D" (e.g. 1234) will be returned.
395 395
396 396 The ``old node``, if not None, is guaranteed to be the last diff of the
397 397 corresponding Differential Revision, and to exist in the repo.
398 398 """
399 399 unfi = repo.unfiltered()
400 400 has_node = unfi.changelog.index.has_node
401 401
402 402 result = {} # {node: (oldnode?, lastdiff?, drev)}
403 403 toconfirm = {} # {node: (force, {precnode}, drev)}
404 404 for node in nodelist:
405 405 ctx = unfi[node]
406 406 # For tags like "D123", put them into "toconfirm" to verify later
407 407 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
408 408 for n in precnodes:
409 409 if has_node(n):
410 410 for tag in unfi.nodetags(n):
411 411 m = _differentialrevisiontagre.match(tag)
412 412 if m:
413 413 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
414 414 break
415 415 else:
416 416 continue # move to next predecessor
417 417 break # found a tag, stop
418 418 else:
419 419 # Check commit message
420 420 m = _differentialrevisiondescre.search(ctx.description())
421 421 if m:
422 422 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
423 423
424 424 # Double-check that the tags are genuine by collecting all old nodes from
425 425 # Phabricator, and expecting the precursors to overlap with them.
426 426 if toconfirm:
427 427 drevs = [drev for force, precs, drev in toconfirm.values()]
428 428 alldiffs = callconduit(
429 429 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
430 430 )
431 431 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
432 432 for newnode, (force, precset, drev) in toconfirm.items():
433 433 diffs = [
434 434 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
435 435 ]
436 436
437 437 # "precursors" as known by Phabricator
438 438 phprecset = set(getnode(d) for d in diffs)
439 439
440 440 # Ignore if precursors (Phabricator and local repo) do not overlap,
441 441 # and force is not set (when commit message says nothing)
442 442 if not force and not bool(phprecset & precset):
443 443 tagname = b'D%d' % drev
444 444 tags.tag(
445 445 repo,
446 446 tagname,
447 447 nullid,
448 448 message=None,
449 449 user=None,
450 450 date=None,
451 451 local=True,
452 452 )
453 453 unfi.ui.warn(
454 454 _(
455 455 b'D%d: local tag removed - does not match '
456 456 b'Differential history\n'
457 457 )
458 458 % drev
459 459 )
460 460 continue
461 461
462 462 # Find the last node using Phabricator metadata, and make sure it
463 463 # exists in the repo
464 464 oldnode = lastdiff = None
465 465 if diffs:
466 466 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
467 467 oldnode = getnode(lastdiff)
468 468 if oldnode and not has_node(oldnode):
469 469 oldnode = None
470 470
471 471 result[newnode] = (oldnode, lastdiff, drev)
472 472
473 473 return result
474 474
475 475
476 476 def getdrevmap(repo, revs):
477 477 """Return a dict mapping each rev in `revs` to their Differential Revision
478 478 ID or None.
479 479 """
480 480 result = {}
481 481 for rev in revs:
482 482 result[rev] = None
483 483 ctx = repo[rev]
484 484 # Check commit message
485 485 m = _differentialrevisiondescre.search(ctx.description())
486 486 if m:
487 487 result[rev] = int(m.group('id'))
488 488 continue
489 489 # Check tags
490 490 for tag in repo.nodetags(ctx.node()):
491 491 m = _differentialrevisiontagre.match(tag)
492 492 if m:
493 493 result[rev] = int(m.group(1))
494 494 break
495 495
496 496 return result
497 497
498 498
499 499 def getdiff(ctx, diffopts):
500 500 """plain-text diff without header (user, commit message, etc)"""
501 501 output = util.stringio()
502 502 for chunk, _label in patch.diffui(
503 503 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
504 504 ):
505 505 output.write(chunk)
506 506 return output.getvalue()
507 507
508 508
509 509 class DiffChangeType(object):
510 510 ADD = 1
511 511 CHANGE = 2
512 512 DELETE = 3
513 513 MOVE_AWAY = 4
514 514 COPY_AWAY = 5
515 515 MOVE_HERE = 6
516 516 COPY_HERE = 7
517 517 MULTICOPY = 8
518 518
519 519
520 520 class DiffFileType(object):
521 521 TEXT = 1
522 522 IMAGE = 2
523 523 BINARY = 3
524 524
525 525
526 526 @attr.s
527 527 class phabhunk(dict):
528 528 """Represents a Differential hunk, which is owned by a Differential change
529 529 """
530 530
531 531 oldOffset = attr.ib(default=0) # camelcase-required
532 532 oldLength = attr.ib(default=0) # camelcase-required
533 533 newOffset = attr.ib(default=0) # camelcase-required
534 534 newLength = attr.ib(default=0) # camelcase-required
535 535 corpus = attr.ib(default='')
536 536 # These get added to the phabchange's equivalents
537 537 addLines = attr.ib(default=0) # camelcase-required
538 538 delLines = attr.ib(default=0) # camelcase-required
539 539
540 540
541 541 @attr.s
542 542 class phabchange(object):
543 543 """Represents a Differential change, owns Differential hunks and owned by a
544 544 Differential diff. Each one represents one file in a diff.
545 545 """
546 546
547 547 currentPath = attr.ib(default=None) # camelcase-required
548 548 oldPath = attr.ib(default=None) # camelcase-required
549 549 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
550 550 metadata = attr.ib(default=attr.Factory(dict))
551 551 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
552 552 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
553 553 type = attr.ib(default=DiffChangeType.CHANGE)
554 554 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
555 555 commitHash = attr.ib(default=None) # camelcase-required
556 556 addLines = attr.ib(default=0) # camelcase-required
557 557 delLines = attr.ib(default=0) # camelcase-required
558 558 hunks = attr.ib(default=attr.Factory(list))
559 559
560 560 def copynewmetadatatoold(self):
561 561 for key in list(self.metadata.keys()):
562 562 newkey = key.replace(b'new:', b'old:')
563 563 self.metadata[newkey] = self.metadata[key]
564 564
565 565 def addoldmode(self, value):
566 566 self.oldProperties[b'unix:filemode'] = value
567 567
568 568 def addnewmode(self, value):
569 569 self.newProperties[b'unix:filemode'] = value
570 570
571 571 def addhunk(self, hunk):
572 572 if not isinstance(hunk, phabhunk):
573 573 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
574 574 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
575 575 # It's useful to include these stats since the Phab web UI shows them,
576 576 # and uses them to estimate how large a change a Revision is. Also used
577 577 # in email subjects for the [+++--] bit.
578 578 self.addLines += hunk.addLines
579 579 self.delLines += hunk.delLines
580 580
581 581
582 582 @attr.s
583 583 class phabdiff(object):
584 584 """Represents a Differential diff, owns Differential changes. Corresponds
585 585 to a commit.
586 586 """
587 587
588 588 # Doesn't seem to be any reason to send this (output of uname -n)
589 589 sourceMachine = attr.ib(default=b'') # camelcase-required
590 590 sourcePath = attr.ib(default=b'/') # camelcase-required
591 591 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
592 592 sourceControlPath = attr.ib(default=b'/') # camelcase-required
593 593 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
594 594 branch = attr.ib(default=b'default')
595 595 bookmark = attr.ib(default=None)
596 596 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
597 597 lintStatus = attr.ib(default=b'none') # camelcase-required
598 598 unitStatus = attr.ib(default=b'none') # camelcase-required
599 599 changes = attr.ib(default=attr.Factory(dict))
600 600 repositoryPHID = attr.ib(default=None) # camelcase-required
601 601
602 602 def addchange(self, change):
603 603 if not isinstance(change, phabchange):
604 604 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
605 605 self.changes[change.currentPath] = pycompat.byteskwargs(
606 606 attr.asdict(change)
607 607 )
608 608
609 609
610 610 def maketext(pchange, ctx, fname):
611 611 """populate the phabchange for a text file"""
612 612 repo = ctx.repo()
613 613 fmatcher = match.exact([fname])
614 614 diffopts = mdiff.diffopts(git=True, context=32767)
615 615 _pfctx, _fctx, header, fhunks = next(
616 616 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
617 617 )
618 618
619 619 for fhunk in fhunks:
620 620 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
621 621 corpus = b''.join(lines[1:])
622 622 shunk = list(header)
623 623 shunk.extend(lines)
624 624 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
625 625 patch.diffstatdata(util.iterlines(shunk))
626 626 )
627 627 pchange.addhunk(
628 628 phabhunk(
629 629 oldOffset,
630 630 oldLength,
631 631 newOffset,
632 632 newLength,
633 633 corpus,
634 634 addLines,
635 635 delLines,
636 636 )
637 637 )
638 638
639 639
640 640 def uploadchunks(fctx, fphid):
641 641 """upload large binary files as separate chunks.
642 642 Phab requests chunking over 8MiB, and splits into 4MiB chunks
643 643 """
644 644 ui = fctx.repo().ui
645 645 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
646 646 with ui.makeprogress(
647 647 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
648 648 ) as progress:
649 649 for chunk in chunks:
650 650 progress.increment()
651 651 if chunk[b'complete']:
652 652 continue
653 653 bstart = int(chunk[b'byteStart'])
654 654 bend = int(chunk[b'byteEnd'])
655 655 callconduit(
656 656 ui,
657 657 b'file.uploadchunk',
658 658 {
659 659 b'filePHID': fphid,
660 660 b'byteStart': bstart,
661 661 b'data': base64.b64encode(fctx.data()[bstart:bend]),
662 662 b'dataEncoding': b'base64',
663 663 },
664 664 )
665 665
666 666
667 667 def uploadfile(fctx):
668 668 """upload binary files to Phabricator"""
669 669 repo = fctx.repo()
670 670 ui = repo.ui
671 671 fname = fctx.path()
672 672 size = fctx.size()
673 673 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
674 674
675 675 # an allocate call is required first to see if an upload is even required
676 676 # (Phab might already have it) and to determine if chunking is needed
677 677 allocateparams = {
678 678 b'name': fname,
679 679 b'contentLength': size,
680 680 b'contentHash': fhash,
681 681 }
682 682 filealloc = callconduit(ui, b'file.allocate', allocateparams)
683 683 fphid = filealloc[b'filePHID']
684 684
685 685 if filealloc[b'upload']:
686 686 ui.write(_(b'uploading %s\n') % bytes(fctx))
687 687 if not fphid:
688 688 uploadparams = {
689 689 b'name': fname,
690 690 b'data_base64': base64.b64encode(fctx.data()),
691 691 }
692 692 fphid = callconduit(ui, b'file.upload', uploadparams)
693 693 else:
694 694 uploadchunks(fctx, fphid)
695 695 else:
696 696 ui.debug(b'server already has %s\n' % bytes(fctx))
697 697
698 698 if not fphid:
699 699 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
700 700
701 701 return fphid
702 702
703 703
704 704 def addoldbinary(pchange, fctx, originalfname):
705 705 """add the metadata for the previous version of a binary file to the
706 706 phabchange for the new version
707 707 """
708 708 oldfctx = fctx.p1()[originalfname]
709 709 if fctx.cmp(oldfctx):
710 710 # Files differ, add the old one
711 711 pchange.metadata[b'old:file:size'] = oldfctx.size()
712 712 mimeguess, _enc = mimetypes.guess_type(
713 713 encoding.unifromlocal(oldfctx.path())
714 714 )
715 715 if mimeguess:
716 716 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
717 717 mimeguess
718 718 )
719 719 fphid = uploadfile(oldfctx)
720 720 pchange.metadata[b'old:binary-phid'] = fphid
721 721 else:
722 722 # If it's left as IMAGE/BINARY web UI might try to display it
723 723 pchange.fileType = DiffFileType.TEXT
724 724 pchange.copynewmetadatatoold()
725 725
726 726
727 727 def makebinary(pchange, fctx):
728 728 """populate the phabchange for a binary file"""
729 729 pchange.fileType = DiffFileType.BINARY
730 730 fphid = uploadfile(fctx)
731 731 pchange.metadata[b'new:binary-phid'] = fphid
732 732 pchange.metadata[b'new:file:size'] = fctx.size()
733 733 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
734 734 if mimeguess:
735 735 mimeguess = pycompat.bytestr(mimeguess)
736 736 pchange.metadata[b'new:file:mime-type'] = mimeguess
737 737 if mimeguess.startswith(b'image/'):
738 738 pchange.fileType = DiffFileType.IMAGE
739 739
740 740
741 741 # Copied from mercurial/patch.py
742 742 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
743 743
744 744
745 745 def notutf8(fctx):
746 746 """detect non-UTF-8 text files since Phabricator requires them to be marked
747 747 as binary
748 748 """
749 749 try:
750 750 fctx.data().decode('utf-8')
751 751 if fctx.parents():
752 752 fctx.p1().data().decode('utf-8')
753 753 return False
754 754 except UnicodeDecodeError:
755 755 fctx.repo().ui.write(
756 756 _(b'file %s detected as non-UTF-8, marked as binary\n')
757 757 % fctx.path()
758 758 )
759 759 return True
760 760
761 761
762 762 def addremoved(pdiff, ctx, removed):
763 763 """add removed files to the phabdiff. Shouldn't include moves"""
764 764 for fname in removed:
765 765 pchange = phabchange(
766 766 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
767 767 )
768 768 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
769 769 fctx = ctx.p1()[fname]
770 770 if not (fctx.isbinary() or notutf8(fctx)):
771 771 maketext(pchange, ctx, fname)
772 772
773 773 pdiff.addchange(pchange)
774 774
775 775
776 776 def addmodified(pdiff, ctx, modified):
777 777 """add modified files to the phabdiff"""
778 778 for fname in modified:
779 779 fctx = ctx[fname]
780 780 pchange = phabchange(currentPath=fname, oldPath=fname)
781 781 filemode = gitmode[ctx[fname].flags()]
782 782 originalmode = gitmode[ctx.p1()[fname].flags()]
783 783 if filemode != originalmode:
784 784 pchange.addoldmode(originalmode)
785 785 pchange.addnewmode(filemode)
786 786
787 787 if fctx.isbinary() or notutf8(fctx):
788 788 makebinary(pchange, fctx)
789 789 addoldbinary(pchange, fctx, fname)
790 790 else:
791 791 maketext(pchange, ctx, fname)
792 792
793 793 pdiff.addchange(pchange)
794 794
795 795
796 796 def addadded(pdiff, ctx, added, removed):
797 797 """add file adds to the phabdiff, both new files and copies/moves"""
798 798 # Keep track of files that've been recorded as moved/copied, so if there are
799 799 # additional copies we can mark them (moves get removed from removed)
800 800 copiedchanges = {}
801 801 movedchanges = {}
802 802 for fname in added:
803 803 fctx = ctx[fname]
804 804 pchange = phabchange(currentPath=fname)
805 805
806 806 filemode = gitmode[ctx[fname].flags()]
807 807 renamed = fctx.renamed()
808 808
809 809 if renamed:
810 810 originalfname = renamed[0]
811 811 originalmode = gitmode[ctx.p1()[originalfname].flags()]
812 812 pchange.oldPath = originalfname
813 813
814 814 if originalfname in removed:
815 815 origpchange = phabchange(
816 816 currentPath=originalfname,
817 817 oldPath=originalfname,
818 818 type=DiffChangeType.MOVE_AWAY,
819 819 awayPaths=[fname],
820 820 )
821 821 movedchanges[originalfname] = origpchange
822 822 removed.remove(originalfname)
823 823 pchange.type = DiffChangeType.MOVE_HERE
824 824 elif originalfname in movedchanges:
825 825 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
826 826 movedchanges[originalfname].awayPaths.append(fname)
827 827 pchange.type = DiffChangeType.COPY_HERE
828 828 else: # pure copy
829 829 if originalfname not in copiedchanges:
830 830 origpchange = phabchange(
831 831 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
832 832 )
833 833 copiedchanges[originalfname] = origpchange
834 834 else:
835 835 origpchange = copiedchanges[originalfname]
836 836 origpchange.awayPaths.append(fname)
837 837 pchange.type = DiffChangeType.COPY_HERE
838 838
839 839 if filemode != originalmode:
840 840 pchange.addoldmode(originalmode)
841 841 pchange.addnewmode(filemode)
842 842 else: # Brand-new file
843 843 pchange.addnewmode(gitmode[fctx.flags()])
844 844 pchange.type = DiffChangeType.ADD
845 845
846 846 if fctx.isbinary() or notutf8(fctx):
847 847 makebinary(pchange, fctx)
848 848 if renamed:
849 849 addoldbinary(pchange, fctx, originalfname)
850 850 else:
851 851 maketext(pchange, ctx, fname)
852 852
853 853 pdiff.addchange(pchange)
854 854
855 855 for _path, copiedchange in copiedchanges.items():
856 856 pdiff.addchange(copiedchange)
857 857 for _path, movedchange in movedchanges.items():
858 858 pdiff.addchange(movedchange)
859 859
860 860
861 861 def creatediff(ctx):
862 862 """create a Differential Diff"""
863 863 repo = ctx.repo()
864 864 repophid = getrepophid(repo)
865 865 # Create a "Differential Diff" via "differential.creatediff" API
866 866 pdiff = phabdiff(
867 867 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
868 868 branch=b'%s' % ctx.branch(),
869 869 )
870 870 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
871 871 # addadded will remove moved files from removed, so addremoved won't get
872 872 # them
873 873 addadded(pdiff, ctx, added, removed)
874 874 addmodified(pdiff, ctx, modified)
875 875 addremoved(pdiff, ctx, removed)
876 876 if repophid:
877 877 pdiff.repositoryPHID = repophid
878 878 diff = callconduit(
879 879 repo.ui,
880 880 b'differential.creatediff',
881 881 pycompat.byteskwargs(attr.asdict(pdiff)),
882 882 )
883 883 if not diff:
884 884 raise error.Abort(_(b'cannot create diff for %s') % ctx)
885 885 return diff
886 886
887 887
888 888 def writediffproperties(ctx, diff):
889 889 """write metadata to diff so patches could be applied losslessly"""
890 890 # creatediff returns with a diffid but query returns with an id
891 891 diffid = diff.get(b'diffid', diff.get(b'id'))
892 892 params = {
893 893 b'diff_id': diffid,
894 894 b'name': b'hg:meta',
895 895 b'data': templatefilters.json(
896 896 {
897 897 b'user': ctx.user(),
898 898 b'date': b'%d %d' % ctx.date(),
899 899 b'branch': ctx.branch(),
900 900 b'node': ctx.hex(),
901 901 b'parent': ctx.p1().hex(),
902 902 }
903 903 ),
904 904 }
905 905 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
906 906
907 907 params = {
908 908 b'diff_id': diffid,
909 909 b'name': b'local:commits',
910 910 b'data': templatefilters.json(
911 911 {
912 912 ctx.hex(): {
913 913 b'author': stringutil.person(ctx.user()),
914 914 b'authorEmail': stringutil.email(ctx.user()),
915 915 b'time': int(ctx.date()[0]),
916 916 b'commit': ctx.hex(),
917 917 b'parents': [ctx.p1().hex()],
918 918 b'branch': ctx.branch(),
919 919 },
920 920 }
921 921 ),
922 922 }
923 923 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
924 924
925 925
926 926 def createdifferentialrevision(
927 927 ctx,
928 928 revid=None,
929 929 parentrevphid=None,
930 930 oldnode=None,
931 931 olddiff=None,
932 932 actions=None,
933 933 comment=None,
934 934 ):
935 935 """create or update a Differential Revision
936 936
937 937 If revid is None, create a new Differential Revision, otherwise update
938 938 revid. If parentrevphid is not None, set it as a dependency.
939 939
940 940 If oldnode is not None, check if the patch content (without commit message
941 941 and metadata) has changed before creating another diff.
942 942
943 943 If actions is not None, they will be appended to the transaction.
944 944 """
945 945 repo = ctx.repo()
946 946 if oldnode:
947 947 diffopts = mdiff.diffopts(git=True, context=32767)
948 948 oldctx = repo.unfiltered()[oldnode]
949 949 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
950 950 else:
951 951 neednewdiff = True
952 952
953 953 transactions = []
954 954 if neednewdiff:
955 955 diff = creatediff(ctx)
956 956 transactions.append({b'type': b'update', b'value': diff[b'phid']})
957 957 if comment:
958 958 transactions.append({b'type': b'comment', b'value': comment})
959 959 else:
960 960 # Even if we don't need to upload a new diff because the patch content
961 961 # has not changed, we might still need to update its metadata so
962 962 # pushers can know the correct node metadata.
963 963 assert olddiff
964 964 diff = olddiff
965 965 writediffproperties(ctx, diff)
966 966
967 967 # Set the parent Revision every time, so commit re-ordering is picked-up
968 968 if parentrevphid:
969 969 transactions.append(
970 970 {b'type': b'parents.set', b'value': [parentrevphid]}
971 971 )
972 972
973 973 if actions:
974 974 transactions += actions
975 975
976 976 # Parse commit message and update related fields.
977 977 desc = ctx.description()
978 978 info = callconduit(
979 979 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
980 980 )
981 981 for k, v in info[b'fields'].items():
982 982 if k in [b'title', b'summary', b'testPlan']:
983 983 transactions.append({b'type': k, b'value': v})
984 984
985 985 params = {b'transactions': transactions}
986 986 if revid is not None:
987 987 # Update an existing Differential Revision
988 988 params[b'objectIdentifier'] = revid
989 989
990 990 revision = callconduit(repo.ui, b'differential.revision.edit', params)
991 991 if not revision:
992 992 raise error.Abort(_(b'cannot create revision for %s') % ctx)
993 993
994 994 return revision, diff
995 995
996 996
997 997 def userphids(repo, names):
998 998 """convert user names to PHIDs"""
999 999 names = [name.lower() for name in names]
1000 1000 query = {b'constraints': {b'usernames': names}}
1001 1001 result = callconduit(repo.ui, b'user.search', query)
1002 1002 # A username that is not found is not an error for the API, so check
1003 1003 # whether we have missed some names here.
1004 1004 data = result[b'data']
1005 1005 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
1006 1006 unresolved = set(names) - resolved
1007 1007 if unresolved:
1008 1008 raise error.Abort(
1009 1009 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1010 1010 )
1011 1011 return [entry[b'phid'] for entry in data]
1012 1012
1013 1013
1014 1014 @vcrcommand(
1015 1015 b'phabsend',
1016 1016 [
1017 1017 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1018 1018 (b'', b'amend', True, _(b'update commit messages')),
1019 1019 (b'', b'reviewer', [], _(b'specify reviewers')),
1020 1020 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1021 1021 (
1022 1022 b'm',
1023 1023 b'comment',
1024 1024 b'',
1025 1025 _(b'add a comment to Revisions with new/updated Diffs'),
1026 1026 ),
1027 1027 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1028 1028 ],
1029 1029 _(b'REV [OPTIONS]'),
1030 1030 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1031 1031 )
1032 1032 def phabsend(ui, repo, *revs, **opts):
1033 1033 """upload changesets to Phabricator
1034 1034
1035 1035 If there are multiple revisions specified, they will be sent as a stack
1036 1036 with a linear dependency relationship, using the order specified by the
1037 1037 revset.
1038 1038
1039 1039 When uploading changesets for the first time, local tags will be created
1040 1040 to maintain the association. After the first time, phabsend will check
1041 1041 obsstore and tag information so it can figure out whether to update an
1042 1042 existing Differential Revision, or create a new one.
1043 1043
1044 1044 If --amend is set, update commit messages so they have the
1045 1045 ``Differential Revision`` URL and remove related tags. This is similar to
1046 1046 what arcanist does, and is preferred in author-push workflows. Otherwise,
1047 1047 local tags are used to record the ``Differential Revision`` association.
1048 1048
1049 1049 The --confirm option lets you confirm changesets before sending them. You
1050 1050 can also add the following to your configuration file to make it the
1051 1051 default behaviour::
1052 1052
1053 1053 [phabsend]
1054 1054 confirm = true
1055 1055
1056 1056 phabsend will check obsstore and the above association to decide whether to
1057 1057 update an existing Differential Revision, or create a new one.
1058 1058 """
1059 1059 opts = pycompat.byteskwargs(opts)
1060 1060 revs = list(revs) + opts.get(b'rev', [])
1061 1061 revs = scmutil.revrange(repo, revs)
1062 1062
1063 1063 if not revs:
1064 1064 raise error.Abort(_(b'phabsend requires at least one changeset'))
1065 1065 if opts.get(b'amend'):
1066 1066 cmdutil.checkunfinished(repo)
1067 1067
1068 1068 # {newnode: (oldnode, olddiff, olddrev)}
1069 1069 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1070 1070
1071 1071 confirm = ui.configbool(b'phabsend', b'confirm')
1072 1072 confirm |= bool(opts.get(b'confirm'))
1073 1073 if confirm:
1074 1074 confirmed = _confirmbeforesend(repo, revs, oldmap)
1075 1075 if not confirmed:
1076 1076 raise error.Abort(_(b'phabsend cancelled'))
1077 1077
1078 1078 actions = []
1079 1079 reviewers = opts.get(b'reviewer', [])
1080 1080 blockers = opts.get(b'blocker', [])
1081 1081 phids = []
1082 1082 if reviewers:
1083 1083 phids.extend(userphids(repo, reviewers))
1084 1084 if blockers:
1085 1085 phids.extend(
1086 1086 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1087 1087 )
1088 1088 if phids:
1089 1089 actions.append({b'type': b'reviewers.add', b'value': phids})
1090 1090
1091 1091 drevids = [] # [int]
1092 1092 diffmap = {} # {newnode: diff}
1093 1093
1094 1094 # Send patches one by one so we know their Differential Revision PHIDs and
1095 1095 # can provide dependency relationship
1096 1096 lastrevphid = None
1097 1097 for rev in revs:
1098 1098 ui.debug(b'sending rev %d\n' % rev)
1099 1099 ctx = repo[rev]
1100 1100
1101 1101 # Get Differential Revision ID
1102 1102 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1103 1103 if oldnode != ctx.node() or opts.get(b'amend'):
1104 1104 # Create or update Differential Revision
1105 1105 revision, diff = createdifferentialrevision(
1106 1106 ctx,
1107 1107 revid,
1108 1108 lastrevphid,
1109 1109 oldnode,
1110 1110 olddiff,
1111 1111 actions,
1112 1112 opts.get(b'comment'),
1113 1113 )
1114 1114 diffmap[ctx.node()] = diff
1115 1115 newrevid = int(revision[b'object'][b'id'])
1116 1116 newrevphid = revision[b'object'][b'phid']
1117 1117 if revid:
1118 1118 action = b'updated'
1119 1119 else:
1120 1120 action = b'created'
1121 1121
1122 1122 # Create a local tag to note the association, if commit message
1123 1123 # does not have it already
1124 1124 m = _differentialrevisiondescre.search(ctx.description())
1125 1125 if not m or int(m.group('id')) != newrevid:
1126 1126 tagname = b'D%d' % newrevid
1127 1127 tags.tag(
1128 1128 repo,
1129 1129 tagname,
1130 1130 ctx.node(),
1131 1131 message=None,
1132 1132 user=None,
1133 1133 date=None,
1134 1134 local=True,
1135 1135 )
1136 1136 else:
1137 1137 # Nothing changed. But still set "newrevphid" so the next revision
1138 1138 # could depend on this one and "newrevid" for the summary line.
1139 1139 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1140 1140 newrevid = revid
1141 1141 action = b'skipped'
1142 1142
1143 1143 actiondesc = ui.label(
1144 1144 {
1145 1145 b'created': _(b'created'),
1146 1146 b'skipped': _(b'skipped'),
1147 1147 b'updated': _(b'updated'),
1148 1148 }[action],
1149 1149 b'phabricator.action.%s' % action,
1150 1150 )
1151 1151 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1152 1152 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1153 1153 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1154 1154 ui.write(
1155 1155 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1156 1156 )
1157 1157 drevids.append(newrevid)
1158 1158 lastrevphid = newrevphid
1159 1159
1160 1160 # Update commit messages and remove tags
1161 1161 if opts.get(b'amend'):
1162 1162 unfi = repo.unfiltered()
1163 1163 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1164 1164 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1165 1165 wnode = unfi[b'.'].node()
1166 1166 mapping = {} # {oldnode: [newnode]}
1167 1167 for i, rev in enumerate(revs):
1168 1168 old = unfi[rev]
1169 1169 drevid = drevids[i]
1170 1170 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1171 1171 newdesc = getdescfromdrev(drev)
1172 1172 # Make sure the commit message contains "Differential Revision"
1173 1173 if old.description() != newdesc:
1174 1174 if old.phase() == phases.public:
1175 1175 ui.warn(
1176 1176 _(b"warning: not updating public commit %s\n")
1177 1177 % scmutil.formatchangeid(old)
1178 1178 )
1179 1179 continue
1180 1180 parents = [
1181 1181 mapping.get(old.p1().node(), (old.p1(),))[0],
1182 1182 mapping.get(old.p2().node(), (old.p2(),))[0],
1183 1183 ]
1184 1184 new = context.metadataonlyctx(
1185 1185 repo,
1186 1186 old,
1187 1187 parents=parents,
1188 1188 text=newdesc,
1189 1189 user=old.user(),
1190 1190 date=old.date(),
1191 1191 extra=old.extra(),
1192 1192 )
1193 1193
1194 1194 newnode = new.commit()
1195 1195
1196 1196 mapping[old.node()] = [newnode]
1197 1197 # Update diff property
1198 1198 # If it fails just warn and keep going, otherwise the DREV
1199 1199 # associations will be lost
1200 1200 try:
1201 1201 writediffproperties(unfi[newnode], diffmap[old.node()])
1202 1202 except util.urlerr.urlerror:
1203 1203 ui.warnnoi18n(
1204 1204 b'Failed to update metadata for D%d\n' % drevid
1205 1205 )
1206 1206 # Remove local tags as they are no longer necessary
1207 1207 tagname = b'D%d' % drevid
1208 1208 if tagname in repo.tags():
1209 1209 tags.tag(
1210 1210 repo,
1211 1211 tagname,
1212 1212 nullid,
1213 1213 message=None,
1214 1214 user=None,
1215 1215 date=None,
1216 1216 local=True,
1217 1217 )
1218 1218 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1219 1219 if wnode in mapping:
1220 1220 unfi.setparents(mapping[wnode][0])
1221 1221
1222 1222
1223 1223 # Map from "hg:meta" keys to header understood by "hg import". The order is
1224 1224 # consistent with "hg export" output.
1225 1225 _metanamemap = util.sortdict(
1226 1226 [
1227 1227 (b'user', b'User'),
1228 1228 (b'date', b'Date'),
1229 1229 (b'branch', b'Branch'),
1230 1230 (b'node', b'Node ID'),
1231 1231 (b'parent', b'Parent '),
1232 1232 ]
1233 1233 )
1234 1234
1235 1235
1236 1236 def _confirmbeforesend(repo, revs, oldmap):
1237 1237 url, token = readurltoken(repo.ui)
1238 1238 ui = repo.ui
1239 1239 for rev in revs:
1240 1240 ctx = repo[rev]
1241 1241 desc = ctx.description().splitlines()[0]
1242 1242 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1243 1243 if drevid:
1244 1244 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1245 1245 else:
1246 1246 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1247 1247
1248 1248 ui.write(
1249 1249 _(b'%s - %s: %s\n')
1250 1250 % (
1251 1251 drevdesc,
1252 1252 ui.label(bytes(ctx), b'phabricator.node'),
1253 1253 ui.label(desc, b'phabricator.desc'),
1254 1254 )
1255 1255 )
1256 1256
1257 1257 if ui.promptchoice(
1258 1258 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1259 1259 ):
1260 1260 return False
1261 1261
1262 1262 return True
1263 1263
1264 1264
1265 1265 _knownstatusnames = {
1266 1266 b'accepted',
1267 1267 b'needsreview',
1268 1268 b'needsrevision',
1269 1269 b'closed',
1270 1270 b'abandoned',
1271 1271 }
1272 1272
1273 1273
1274 1274 def _getstatusname(drev):
1275 1275 """get normalized status name from a Differential Revision"""
1276 1276 return drev[b'statusName'].replace(b' ', b'').lower()
1277 1277
1278 1278
1279 1279 # Small language to specify differential revisions. Supported symbols: (), :X,
1280 1280 # +, and -.
1281 1281
1282 1282 _elements = {
1283 1283 # token-type: binding-strength, primary, prefix, infix, suffix
1284 1284 b'(': (12, None, (b'group', 1, b')'), None, None),
1285 1285 b':': (8, None, (b'ancestors', 8), None, None),
1286 1286 b'&': (5, None, None, (b'and_', 5), None),
1287 1287 b'+': (4, None, None, (b'add', 4), None),
1288 1288 b'-': (4, None, None, (b'sub', 4), None),
1289 1289 b')': (0, None, None, None, None),
1290 1290 b'symbol': (0, b'symbol', None, None, None),
1291 1291 b'end': (0, None, None, None, None),
1292 1292 }
1293 1293
1294 1294
1295 1295 def _tokenize(text):
1296 1296 view = memoryview(text) # zero-copy slice
1297 1297 special = b'():+-& '
1298 1298 pos = 0
1299 1299 length = len(text)
1300 1300 while pos < length:
1301 1301 symbol = b''.join(
1302 1302 itertools.takewhile(
1303 1303 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1304 1304 )
1305 1305 )
1306 1306 if symbol:
1307 1307 yield (b'symbol', symbol, pos)
1308 1308 pos += len(symbol)
1309 1309 else: # special char, ignore space
1310 1310 if text[pos : pos + 1] != b' ':
1311 1311 yield (text[pos : pos + 1], None, pos)
1312 1312 pos += 1
1313 1313 yield (b'end', None, pos)
1314 1314
1315 1315
1316 1316 def _parse(text):
1317 1317 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1318 1318 if pos != len(text):
1319 1319 raise error.ParseError(b'invalid token', pos)
1320 1320 return tree
1321 1321
1322 1322
1323 1323 def _parsedrev(symbol):
1324 1324 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1325 1325 if symbol.startswith(b'D') and symbol[1:].isdigit():
1326 1326 return int(symbol[1:])
1327 1327 if symbol.isdigit():
1328 1328 return int(symbol)
1329 1329
1330 1330
1331 1331 def _prefetchdrevs(tree):
1332 1332 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1333 1333 drevs = set()
1334 1334 ancestordrevs = set()
1335 1335 op = tree[0]
1336 1336 if op == b'symbol':
1337 1337 r = _parsedrev(tree[1])
1338 1338 if r:
1339 1339 drevs.add(r)
1340 1340 elif op == b'ancestors':
1341 1341 r, a = _prefetchdrevs(tree[1])
1342 1342 drevs.update(r)
1343 1343 ancestordrevs.update(r)
1344 1344 ancestordrevs.update(a)
1345 1345 else:
1346 1346 for t in tree[1:]:
1347 1347 r, a = _prefetchdrevs(t)
1348 1348 drevs.update(r)
1349 1349 ancestordrevs.update(a)
1350 1350 return drevs, ancestordrevs
1351 1351
1352 1352
1353 1353 def querydrev(repo, spec):
1354 1354 """return a list of "Differential Revision" dicts
1355 1355
1356 1356 spec is a string using a simple query language, see docstring in phabread
1357 1357 for details.
1358 1358
1359 1359 A "Differential Revision dict" looks like:
1360 1360
1361 1361 {
1362 1362 "id": "2",
1363 1363 "phid": "PHID-DREV-672qvysjcczopag46qty",
1364 1364 "title": "example",
1365 1365 "uri": "https://phab.example.com/D2",
1366 1366 "dateCreated": "1499181406",
1367 1367 "dateModified": "1499182103",
1368 1368 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1369 1369 "status": "0",
1370 1370 "statusName": "Needs Review",
1371 1371 "properties": [],
1372 1372 "branch": null,
1373 1373 "summary": "",
1374 1374 "testPlan": "",
1375 1375 "lineCount": "2",
1376 1376 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1377 1377 "diffs": [
1378 1378 "3",
1379 1379 "4",
1380 1380 ],
1381 1381 "commits": [],
1382 1382 "reviewers": [],
1383 1383 "ccs": [],
1384 1384 "hashes": [],
1385 1385 "auxiliary": {
1386 1386 "phabricator:projects": [],
1387 1387 "phabricator:depends-on": [
1388 1388 "PHID-DREV-gbapp366kutjebt7agcd"
1389 1389 ]
1390 1390 },
1391 1391 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1392 1392 "sourcePath": null
1393 1393 }
1394 1394 """
1395 1395
1396 1396 def fetch(params):
1397 1397 """params -> single drev or None"""
1398 1398 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1399 1399 if key in prefetched:
1400 1400 return prefetched[key]
1401 1401 drevs = callconduit(repo.ui, b'differential.query', params)
1402 1402 # Fill prefetched with the result
1403 1403 for drev in drevs:
1404 1404 prefetched[drev[b'phid']] = drev
1405 1405 prefetched[int(drev[b'id'])] = drev
1406 1406 if key not in prefetched:
1407 1407 raise error.Abort(
1408 1408 _(b'cannot get Differential Revision %r') % params
1409 1409 )
1410 1410 return prefetched[key]
1411 1411
1412 1412 def getstack(topdrevids):
1413 1413 """given a top, get a stack from the bottom, [id] -> [id]"""
1414 1414 visited = set()
1415 1415 result = []
1416 1416 queue = [{b'ids': [i]} for i in topdrevids]
1417 1417 while queue:
1418 1418 params = queue.pop()
1419 1419 drev = fetch(params)
1420 1420 if drev[b'id'] in visited:
1421 1421 continue
1422 1422 visited.add(drev[b'id'])
1423 1423 result.append(int(drev[b'id']))
1424 1424 auxiliary = drev.get(b'auxiliary', {})
1425 1425 depends = auxiliary.get(b'phabricator:depends-on', [])
1426 1426 for phid in depends:
1427 1427 queue.append({b'phids': [phid]})
1428 1428 result.reverse()
1429 1429 return smartset.baseset(result)
1430 1430
1431 1431 # Initialize prefetch cache
1432 1432 prefetched = {} # {id or phid: drev}
1433 1433
1434 1434 tree = _parse(spec)
1435 1435 drevs, ancestordrevs = _prefetchdrevs(tree)
1436 1436
1437 1437 # developer config: phabricator.batchsize
1438 1438 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1439 1439
1440 1440 # Prefetch Differential Revisions in batch
1441 1441 tofetch = set(drevs)
1442 1442 for r in ancestordrevs:
1443 1443 tofetch.update(range(max(1, r - batchsize), r + 1))
1444 1444 if drevs:
1445 1445 fetch({b'ids': list(tofetch)})
1446 1446 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1447 1447
1448 1448 # Walk through the tree, return smartsets
1449 1449 def walk(tree):
1450 1450 op = tree[0]
1451 1451 if op == b'symbol':
1452 1452 drev = _parsedrev(tree[1])
1453 1453 if drev:
1454 1454 return smartset.baseset([drev])
1455 1455 elif tree[1] in _knownstatusnames:
1456 1456 drevs = [
1457 1457 r
1458 1458 for r in validids
1459 1459 if _getstatusname(prefetched[r]) == tree[1]
1460 1460 ]
1461 1461 return smartset.baseset(drevs)
1462 1462 else:
1463 1463 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1464 1464 elif op in {b'and_', b'add', b'sub'}:
1465 1465 assert len(tree) == 3
1466 1466 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1467 1467 elif op == b'group':
1468 1468 return walk(tree[1])
1469 1469 elif op == b'ancestors':
1470 1470 return getstack(walk(tree[1]))
1471 1471 else:
1472 1472 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1473 1473
1474 1474 return [prefetched[r] for r in walk(tree)]
1475 1475
1476 1476
1477 1477 def getdescfromdrev(drev):
1478 1478 """get description (commit message) from "Differential Revision"
1479 1479
1480 1480 This is similar to differential.getcommitmessage API. But we only care
1481 1481 about limited fields: title, summary, test plan, and URL.
1482 1482 """
1483 1483 title = drev[b'title']
1484 1484 summary = drev[b'summary'].rstrip()
1485 1485 testplan = drev[b'testPlan'].rstrip()
1486 1486 if testplan:
1487 1487 testplan = b'Test Plan:\n%s' % testplan
1488 1488 uri = b'Differential Revision: %s' % drev[b'uri']
1489 1489 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1490 1490
1491 1491
1492 1492 def getdiffmeta(diff):
1493 1493 """get commit metadata (date, node, user, p1) from a diff object
1494 1494
1495 1495 The metadata could be "hg:meta", sent by phabsend, like:
1496 1496
1497 1497 "properties": {
1498 1498 "hg:meta": {
1499 1499 "date": "1499571514 25200",
1500 1500 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1501 1501 "user": "Foo Bar <foo@example.com>",
1502 1502 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1503 1503 }
1504 1504 }
1505 1505
1506 1506 Or converted from "local:commits", sent by "arc", like:
1507 1507
1508 1508 "properties": {
1509 1509 "local:commits": {
1510 1510 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1511 1511 "author": "Foo Bar",
1512 1512 "time": 1499546314,
1513 1513 "branch": "default",
1514 1514 "tag": "",
1515 1515 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1516 1516 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1517 1517 "local": "1000",
1518 1518 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1519 1519 "summary": "...",
1520 1520 "message": "...",
1521 1521 "authorEmail": "foo@example.com"
1522 1522 }
1523 1523 }
1524 1524 }
1525 1525
1526 1526 Note: metadata extracted from "local:commits" will lose time zone
1527 1527 information.
1528 1528 """
1529 1529 props = diff.get(b'properties') or {}
1530 1530 meta = props.get(b'hg:meta')
1531 1531 if not meta:
1532 1532 if props.get(b'local:commits'):
1533 1533 commit = sorted(props[b'local:commits'].values())[0]
1534 1534 meta = {}
1535 1535 if b'author' in commit and b'authorEmail' in commit:
1536 1536 meta[b'user'] = b'%s <%s>' % (
1537 1537 commit[b'author'],
1538 1538 commit[b'authorEmail'],
1539 1539 )
1540 1540 if b'time' in commit:
1541 1541 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1542 1542 if b'branch' in commit:
1543 1543 meta[b'branch'] = commit[b'branch']
1544 1544 node = commit.get(b'commit', commit.get(b'rev'))
1545 1545 if node:
1546 1546 meta[b'node'] = node
1547 1547 if len(commit.get(b'parents', ())) >= 1:
1548 1548 meta[b'parent'] = commit[b'parents'][0]
1549 1549 else:
1550 1550 meta = {}
1551 1551 if b'date' not in meta and b'dateCreated' in diff:
1552 1552 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1553 1553 if b'branch' not in meta and diff.get(b'branch'):
1554 1554 meta[b'branch'] = diff[b'branch']
1555 1555 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1556 1556 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1557 1557 return meta
1558 1558
1559 1559
1560 1560 def readpatch(repo, drevs, write):
1561 1561 """generate plain-text patch readable by 'hg import'
1562 1562
1563 1563 write is usually ui.write. drevs is what "querydrev" returns, results of
1564 1564 "differential.query".
1565 1565 """
1566 1566 # Prefetch hg:meta property for all diffs
1567 1567 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1568 1568 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1569 1569
1570 1570 # Generate patch for each drev
1571 1571 for drev in drevs:
1572 1572 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1573 1573
1574 1574 diffid = max(int(v) for v in drev[b'diffs'])
1575 1575 body = callconduit(
1576 1576 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1577 1577 )
1578 1578 desc = getdescfromdrev(drev)
1579 1579 header = b'# HG changeset patch\n'
1580 1580
1581 1581 # Try to preserve metadata from hg:meta property. Write hg patch
1582 1582 # headers that can be read by the "import" command. See patchheadermap
1583 1583 # and extract in mercurial/patch.py for supported headers.
1584 1584 meta = getdiffmeta(diffs[b'%d' % diffid])
1585 1585 for k in _metanamemap.keys():
1586 1586 if k in meta:
1587 1587 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1588 1588
1589 1589 content = b'%s%s\n%s' % (header, desc, body)
1590 1590 write(content)
1591 1591
1592 1592
1593 1593 @vcrcommand(
1594 1594 b'phabread',
1595 1595 [(b'', b'stack', False, _(b'read dependencies'))],
1596 1596 _(b'DREVSPEC [OPTIONS]'),
1597 1597 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1598 1598 )
1599 1599 def phabread(ui, repo, spec, **opts):
1600 1600 """print patches from Phabricator suitable for importing
1601 1601
1602 1602 DREVSPEC can be a Differential Revision identity, like ``D123``, or just
1603 1603 the number ``123``. It can also use common operators like ``+``, ``-``,
1604 1604 ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` can be used to
1605 1605 select a stack.
1606 1606
1607 1607 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1608 1608 can be used to filter patches by status. For performance reasons, they
1609 1609 only represent a subset of non-status selections and cannot be used alone.
1610 1610
1611 1611 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
1612 1612 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1613 1613 stack up to D9.
1614 1614
1615 1615 If --stack is given, follow dependency information and read all patches.
1616 1616 It is equivalent to the ``:`` operator.
1617 1617 """
1618 1618 opts = pycompat.byteskwargs(opts)
1619 1619 if opts.get(b'stack'):
1620 1620 spec = b':(%s)' % spec
1621 1621 drevs = querydrev(repo, spec)
1622 1622 readpatch(repo, drevs, ui.write)
1623 1623
1624 1624
1625 1625 @vcrcommand(
1626 1626 b'phabupdate',
1627 1627 [
1628 1628 (b'', b'accept', False, _(b'accept revisions')),
1629 1629 (b'', b'reject', False, _(b'reject revisions')),
1630 1630 (b'', b'abandon', False, _(b'abandon revisions')),
1631 1631 (b'', b'reclaim', False, _(b'reclaim revisions')),
1632 1632 (b'm', b'comment', b'', _(b'comment on the last revision')),
1633 1633 ],
1634 1634 _(b'DREVSPEC [OPTIONS]'),
1635 1635 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1636 1636 )
1637 1637 def phabupdate(ui, repo, spec, **opts):
1638 1638 """update Differential Revision in batch
1639 1639
1640 1640 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1641 1641 """
1642 1642 opts = pycompat.byteskwargs(opts)
1643 1643 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1644 1644 if len(flags) > 1:
1645 1645 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1646 1646
1647 1647 actions = []
1648 1648 for f in flags:
1649 1649 actions.append({b'type': f, b'value': True})
1650 1650
1651 1651 drevs = querydrev(repo, spec)
1652 1652 for i, drev in enumerate(drevs):
1653 1653 if i + 1 == len(drevs) and opts.get(b'comment'):
1654 1654 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1655 1655 if actions:
1656 1656 params = {
1657 1657 b'objectIdentifier': drev[b'phid'],
1658 1658 b'transactions': actions,
1659 1659 }
1660 1660 callconduit(ui, b'differential.revision.edit', params)
1661 1661
1662 1662
1663 1663 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1664 1664 def template_review(context, mapping):
1665 1665 """:phabreview: Object describing the review for this changeset.
1666 1666 Has attributes `url` and `id`.
1667 1667 """
1668 1668 ctx = context.resource(mapping, b'ctx')
1669 1669 m = _differentialrevisiondescre.search(ctx.description())
1670 1670 if m:
1671 1671 return templateutil.hybriddict(
1672 1672 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1673 1673 )
1674 1674 else:
1675 1675 tags = ctx.repo().nodetags(ctx.node())
1676 1676 for t in tags:
1677 1677 if _differentialrevisiontagre.match(t):
1678 1678 url = ctx.repo().ui.config(b'phabricator', b'url')
1679 1679 if not url.endswith(b'/'):
1680 1680 url += b'/'
1681 1681 url += t
1682 1682
1683 1683 return templateutil.hybriddict({b'url': url, b'id': t,})
1684 1684 return None
1685 1685
1686 1686
1687 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1688 def template_status(context, mapping):
1689 """:phabstatus: String. Status of Phabricator differential.
1690 """
1691 ctx = context.resource(mapping, b'ctx')
1692 repo = context.resource(mapping, b'repo')
1693 ui = context.resource(mapping, b'ui')
1694
1695 rev = ctx.rev()
1696 try:
1697 drevid = getdrevmap(repo, [rev])[rev]
1698 except KeyError:
1699 return None
1700 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1701 for drev in drevs:
1702 if int(drev[b'id']) == drevid:
1703 return templateutil.hybriddict(
1704 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1705 )
1706 return None
1707
1708
1687 1709 @show.showview(b'phabstatus', csettopic=b'work')
1688 1710 def phabstatusshowview(ui, repo, displayer):
1689 1711 """Phabricator differiential status"""
1690 1712 revs = repo.revs('sort(_underway(), topo)')
1691 1713 drevmap = getdrevmap(repo, revs)
1692 1714 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1693 1715 for rev, drevid in pycompat.iteritems(drevmap):
1694 1716 if drevid is not None:
1695 1717 drevids.add(drevid)
1696 1718 revsbydrevid.setdefault(drevid, set([])).add(rev)
1697 1719 else:
1698 1720 unknownrevs.append(rev)
1699 1721
1700 1722 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1701 1723 drevsbyrev = {}
1702 1724 for drev in drevs:
1703 1725 for rev in revsbydrevid[int(drev[b'id'])]:
1704 1726 drevsbyrev[rev] = drev
1705 1727
1706 1728 def phabstatus(ctx):
1707 1729 drev = drevsbyrev[ctx.rev()]
1708 1730 ui.write(b"\n%(uri)s %(statusName)s\n" % drev)
1709 1731
1710 1732 revs -= smartset.baseset(unknownrevs)
1711 1733 revdag = graphmod.dagwalker(repo, revs)
1712 1734
1713 1735 ui.setconfig(b'experimental', b'graphshorten', True)
1714 1736 displayer._exthook = phabstatus
1715 1737 nodelen = show.longestshortest(repo, revs)
1716 1738 logcmdutil.displaygraph(
1717 1739 ui,
1718 1740 repo,
1719 1741 revdag,
1720 1742 displayer,
1721 1743 graphmod.asciiedges,
1722 1744 props={b'nodelen': nodelen},
1723 1745 )