phabricator: use exthelper to register commands, config, and templates...
Matt Harbison
r43243:24bf7a3d default
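The gist of the change: instead of building ``cmdtable``/``configtable`` by
hand with ``registrar``, every registration now goes through a single
``exthelper`` instance, as the hunks below show. A minimal sketch of the
pattern (the ``mycmd`` command is made up for illustration)::

    from mercurial import exthelper
    from mercurial.i18n import _

    eh = exthelper.exthelper()

    # the tables Mercurial's extension loader looks for become attributes
    # of the helper instead of hand-built registrar tables
    cmdtable = eh.cmdtable
    command = eh.command
    configtable = eh.configtable
    templatekeyword = eh.templatekeyword

    eh.configitem(b'phabricator', b'url', default=None)

    @eh.command(b'mycmd', [], _(b'hg mycmd'))
    def mycmd(ui, repo):
        """hypothetical command registered through the helper"""
        ui.write(b'hello from mycmd\n')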
@@ -1,1094 +1,1093 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config on the server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import contextlib
45 45 import itertools
46 46 import json
47 47 import operator
48 48 import re
49 49
50 50 from mercurial.node import bin, nullid
51 51 from mercurial.i18n import _
52 52 from mercurial import (
53 53 cmdutil,
54 54 context,
55 55 encoding,
56 56 error,
57 exthelper,
57 58 httpconnection as httpconnectionmod,
58 59 mdiff,
59 60 obsutil,
60 61 parser,
61 62 patch,
62 63 phases,
63 64 pycompat,
64 registrar,
65 65 scmutil,
66 66 smartset,
67 67 tags,
68 68 templatefilters,
69 69 templateutil,
70 70 url as urlmod,
71 71 util,
72 72 )
73 73 from mercurial.utils import (
74 74 procutil,
75 75 stringutil,
76 76 )
77 77
78 78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 80 # be specifying the version(s) of Mercurial they are tested with, or
81 81 # leave the attribute unspecified.
82 82 testedwith = 'ships-with-hg-core'
83 83
84 cmdtable = {}
85 command = registrar.command(cmdtable)
84 eh = exthelper.exthelper()
86 85
87 configtable = {}
88 configitem = registrar.configitem(configtable)
86 cmdtable = eh.cmdtable
87 command = eh.command
88 configtable = eh.configtable
89 templatekeyword = eh.templatekeyword
89 90
90 91 # developer config: phabricator.batchsize
91 configitem(b'phabricator', b'batchsize',
92 eh.configitem(b'phabricator', b'batchsize',
92 93 default=12,
93 94 )
94 configitem(b'phabricator', b'callsign',
95 eh.configitem(b'phabricator', b'callsign',
95 96 default=None,
96 97 )
97 configitem(b'phabricator', b'curlcmd',
98 eh.configitem(b'phabricator', b'curlcmd',
98 99 default=None,
99 100 )
100 101 # developer config: phabricator.repophid
101 configitem(b'phabricator', b'repophid',
102 eh.configitem(b'phabricator', b'repophid',
102 103 default=None,
103 104 )
104 configitem(b'phabricator', b'url',
105 eh.configitem(b'phabricator', b'url',
105 106 default=None,
106 107 )
107 configitem(b'phabsend', b'confirm',
108 eh.configitem(b'phabsend', b'confirm',
108 109 default=False,
109 110 )
110 111
111 112 colortable = {
112 113 b'phabricator.action.created': b'green',
113 114 b'phabricator.action.skipped': b'magenta',
114 115 b'phabricator.action.updated': b'magenta',
115 116 b'phabricator.desc': b'',
116 117 b'phabricator.drev': b'bold',
117 118 b'phabricator.node': b'',
118 119 }
119 120
120 121 _VCR_FLAGS = [
121 122 (b'', b'test-vcr', b'',
122 123 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
123 124 b', otherwise will mock all http requests using the specified vcr file.'
124 125 b' (ADVANCED)'
125 126 )),
126 127 ]
127 128
128 129 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
129 130 fullflags = flags + _VCR_FLAGS
130 131 def hgmatcher(r1, r2):
131 132 if r1.uri != r2.uri or r1.method != r2.method:
132 133 return False
133 134 r1params = r1.body.split(b'&')
134 135 r2params = r2.body.split(b'&')
135 136 return set(r1params) == set(r2params)
136 137
137 138 def sanitiserequest(request):
138 139 request.body = re.sub(
139 140 br'cli-[a-z0-9]+',
140 141 br'cli-hahayouwish',
141 142 request.body
142 143 )
143 144 return request
144 145
145 146 def sanitiseresponse(response):
146 147 if r'set-cookie' in response[r'headers']:
147 148 del response[r'headers'][r'set-cookie']
148 149 return response
149 150
150 151 def decorate(fn):
151 152 def inner(*args, **kwargs):
152 153 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
153 154 if cassette:
154 155 import hgdemandimport
155 156 with hgdemandimport.deactivated():
156 157 import vcr as vcrmod
157 158 import vcr.stubs as stubs
158 159 vcr = vcrmod.VCR(
159 160 serializer=r'json',
160 161 before_record_request=sanitiserequest,
161 162 before_record_response=sanitiseresponse,
162 163 custom_patches=[
163 164 (urlmod, r'httpconnection',
164 165 stubs.VCRHTTPConnection),
165 166 (urlmod, r'httpsconnection',
166 167 stubs.VCRHTTPSConnection),
167 168 ])
168 169 vcr.register_matcher(r'hgmatcher', hgmatcher)
169 170 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
170 171 return fn(*args, **kwargs)
171 172 return fn(*args, **kwargs)
172 173 inner.__name__ = fn.__name__
173 174 inner.__doc__ = fn.__doc__
174 175 return command(name, fullflags, spec, helpcategory=helpcategory,
175 176 optionalrepo=optionalrepo)(inner)
176 177 return decorate
177 178
178 179 def urlencodenested(params):
179 180 """like urlencode, but works with nested parameters.
180 181
181 182 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
182 183 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
183 184 urlencode. Note: the encoding is consistent with PHP's http_build_query.
184 185 """
185 186 flatparams = util.sortdict()
186 187 def process(prefix, obj):
187 188 if isinstance(obj, bool):
188 189 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
189 190 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
190 191 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
191 192 if items is None:
192 193 flatparams[prefix] = obj
193 194 else:
194 195 for k, v in items(obj):
195 196 if prefix:
196 197 process(b'%s[%s]' % (prefix, k), v)
197 198 else:
198 199 process(k, v)
199 200 process(b'', params)
200 201 return util.urlreq.urlencode(flatparams)
201 202
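To make the flattening concrete, here is a standalone sketch of the same
convention using only the standard library (it skips the boolean to
``true``/``false`` mapping that the real helper performs)::

    from urllib.parse import urlencode

    def flatten(prefix, obj, out):
        # lists become a[0], a[1], ...; dicts become d[e]; scalars are kept
        if isinstance(obj, list):
            items = [('%d' % i, v) for i, v in enumerate(obj)]
        elif isinstance(obj, dict):
            items = list(obj.items())
        else:
            out[prefix] = obj
            return
        for k, v in items:
            flatten('%s[%s]' % (prefix, k) if prefix else k, v, out)

    flat = {}
    flatten('', {'a': ['b', 'c'], 'd': {'e': 'f'}}, flat)
    print(urlencode(flat))  # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f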
202 203 def readurltoken(ui):
203 204 """return conduit url, token and make sure they exist
204 205
205 206 Currently read from [auth] config section. In the future, it might
206 207 make sense to read from .arcconfig and .arcrc as well.
207 208 """
208 209 url = ui.config(b'phabricator', b'url')
209 210 if not url:
210 211 raise error.Abort(_(b'config %s.%s is required')
211 212 % (b'phabricator', b'url'))
212 213
213 214 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
214 215 token = None
215 216
216 217 if res:
217 218 group, auth = res
218 219
219 220 ui.debug(b"using auth.%s.* for authentication\n" % group)
220 221
221 222 token = auth.get(b'phabtoken')
222 223
223 224 if not token:
224 225 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
225 226 % (url,))
226 227
227 228 return url, token
228 229
229 230 def callconduit(ui, name, params):
230 231 """call Conduit API, params is a dict. return json.loads result, or None"""
231 232 host, token = readurltoken(ui)
232 233 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
233 234 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
234 235 params = params.copy()
235 236 params[b'api.token'] = token
236 237 data = urlencodenested(params)
237 238 curlcmd = ui.config(b'phabricator', b'curlcmd')
238 239 if curlcmd:
239 240 sin, sout = procutil.popen2(b'%s -d @- %s'
240 241 % (curlcmd, procutil.shellquote(url)))
241 242 sin.write(data)
242 243 sin.close()
243 244 body = sout.read()
244 245 else:
245 246 urlopener = urlmod.opener(ui, authinfo)
246 247 request = util.urlreq.request(pycompat.strurl(url), data=data)
247 248 with contextlib.closing(urlopener.open(request)) as rsp:
248 249 body = rsp.read()
249 250 ui.debug(b'Conduit Response: %s\n' % body)
250 251 parsed = pycompat.rapply(
251 252 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
252 253 else x,
253 254 json.loads(body)
254 255 )
255 256 if parsed.get(b'error_code'):
256 257 msg = (_(b'Conduit Error (%s): %s')
257 258 % (parsed[b'error_code'], parsed[b'error_info']))
258 259 raise error.Abort(msg)
259 260 return parsed[b'result']
260 261
261 262 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
262 263 def debugcallconduit(ui, repo, name):
263 264 """call Conduit API
264 265
265 266 Call parameters are read from stdin as a JSON blob. Result will be written
266 267 to stdout as a JSON blob.
267 268 """
268 269 # json.loads only accepts bytes from 3.6+
269 270 rawparams = encoding.unifromlocal(ui.fin.read())
270 271 # json.loads only returns unicode strings
271 272 params = pycompat.rapply(lambda x:
272 273 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
273 274 json.loads(rawparams)
274 275 )
275 276 # json.dumps only accepts unicode strings
276 277 result = pycompat.rapply(lambda x:
277 278 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
278 279 callconduit(ui, name, params)
279 280 )
280 281 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
281 282 ui.write(b'%s\n' % encoding.unitolocal(s))
282 283
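Both callconduit and debugcallconduit lean on ``pycompat.rapply`` to convert
between the unicode strings produced by the ``json`` module and the byte
strings Mercurial works with. A simplified standalone sketch of that
conversion (assuming UTF-8, where the real code uses the local encoding)::

    import json

    def rapply(f, obj):
        # apply f to every leaf of nested dicts/lists, keys included
        if isinstance(obj, dict):
            return {rapply(f, k): rapply(f, v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [rapply(f, v) for v in obj]
        return f(obj)

    tobytes = lambda x: x.encode('utf-8') if isinstance(x, str) else x
    parsed = rapply(tobytes, json.loads('{"error_code": null, "result": {"id": "2"}}'))
    # parsed == {b'error_code': None, b'result': {b'id': b'2'}}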
283 284 def getrepophid(repo):
284 285 """given callsign, return repository PHID or None"""
285 286 # developer config: phabricator.repophid
286 287 repophid = repo.ui.config(b'phabricator', b'repophid')
287 288 if repophid:
288 289 return repophid
289 290 callsign = repo.ui.config(b'phabricator', b'callsign')
290 291 if not callsign:
291 292 return None
292 293 query = callconduit(repo.ui, b'diffusion.repository.search',
293 294 {b'constraints': {b'callsigns': [callsign]}})
294 295 if len(query[b'data']) == 0:
295 296 return None
296 297 repophid = query[b'data'][0][b'phid']
297 298 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
298 299 return repophid
299 300
300 301 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
301 302 _differentialrevisiondescre = re.compile(
302 303 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
303 304
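A quick illustration of what the two patterns above accept (local variable
names are for illustration only)::

    import re

    tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)

    assert tagre.match(b'D123').group(1) == b'123'
    assert tagre.match(b'D0') is None    # revision numbers never start with 0
    msg = b'fix a bug\n\nDifferential Revision: https://phab.example.com/D123'
    m = descre.search(msg)
    assert m.group(r'url') == b'https://phab.example.com/D123'
    assert m.group(r'id') == b'123'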
304 305 def getoldnodedrevmap(repo, nodelist):
305 306 """find previous nodes that has been sent to Phabricator
306 307
307 308 return {node: (oldnode, Differential diff, Differential Revision ID)}
308 309 for node in nodelist with known previous sent versions, or associated
309 310 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
310 311 be ``None``.
311 312
312 313 Examines commit messages like "Differential Revision:" to get the
313 314 association information.
314 315
315 316 If no such commit message line is found, examine all precursors and their
316 317 tags. Tags formatted like "D1234" are considered a match, and the node
317 318 carrying that tag plus the number after "D" (e.g. 1234) will be returned.
318 319
319 320 The ``old node``, if not None, is guaranteed to be the last diff of the
320 321 corresponding Differential Revision, and to exist in the repo.
321 322 """
322 323 unfi = repo.unfiltered()
323 324 nodemap = unfi.changelog.nodemap
324 325
325 326 result = {} # {node: (oldnode?, lastdiff?, drev)}
326 327 toconfirm = {} # {node: (force, {precnode}, drev)}
327 328 for node in nodelist:
328 329 ctx = unfi[node]
329 330 # For tags like "D123", put them into "toconfirm" to verify later
330 331 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
331 332 for n in precnodes:
332 333 if n in nodemap:
333 334 for tag in unfi.nodetags(n):
334 335 m = _differentialrevisiontagre.match(tag)
335 336 if m:
336 337 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
337 338 continue
338 339
339 340 # Check commit message
340 341 m = _differentialrevisiondescre.search(ctx.description())
341 342 if m:
342 343 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
343 344
344 345 # Double check if tags are genuine by collecting all old nodes from
345 346 # Phabricator, and expect precursors overlap with it.
346 347 if toconfirm:
347 348 drevs = [drev for force, precs, drev in toconfirm.values()]
348 349 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
349 350 {b'revisionIDs': drevs})
350 351 getnode = lambda d: bin(
351 352 getdiffmeta(d).get(b'node', b'')) or None
352 353 for newnode, (force, precset, drev) in toconfirm.items():
353 354 diffs = [d for d in alldiffs.values()
354 355 if int(d[b'revisionID']) == drev]
355 356
356 357 # "precursors" as known by Phabricator
357 358 phprecset = set(getnode(d) for d in diffs)
358 359
359 360 # Ignore if precursors (Phabricator and local repo) do not overlap,
360 361 # and force is not set (when commit message says nothing)
361 362 if not force and not bool(phprecset & precset):
362 363 tagname = b'D%d' % drev
363 364 tags.tag(repo, tagname, nullid, message=None, user=None,
364 365 date=None, local=True)
365 366 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
366 367 b'Differential history\n') % drev)
367 368 continue
368 369
369 370 # Find the last node using Phabricator metadata, and make sure it
370 371 # exists in the repo
371 372 oldnode = lastdiff = None
372 373 if diffs:
373 374 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
374 375 oldnode = getnode(lastdiff)
375 376 if oldnode and oldnode not in nodemap:
376 377 oldnode = None
377 378
378 379 result[newnode] = (oldnode, lastdiff, drev)
379 380
380 381 return result
381 382
382 383 def getdiff(ctx, diffopts):
383 384 """plain-text diff without header (user, commit message, etc)"""
384 385 output = util.stringio()
385 386 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
386 387 None, opts=diffopts):
387 388 output.write(chunk)
388 389 return output.getvalue()
389 390
390 391 def creatediff(ctx):
391 392 """create a Differential Diff"""
392 393 repo = ctx.repo()
393 394 repophid = getrepophid(repo)
394 395 # Create a "Differential Diff" via "differential.createrawdiff" API
395 396 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
396 397 if repophid:
397 398 params[b'repositoryPHID'] = repophid
398 399 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
399 400 if not diff:
400 401 raise error.Abort(_(b'cannot create diff for %s') % ctx)
401 402 return diff
402 403
403 404 def writediffproperties(ctx, diff):
404 405 """write metadata to diff so patches could be applied losslessly"""
405 406 params = {
406 407 b'diff_id': diff[b'id'],
407 408 b'name': b'hg:meta',
408 409 b'data': templatefilters.json({
409 410 b'user': ctx.user(),
410 411 b'date': b'%d %d' % ctx.date(),
411 412 b'branch': ctx.branch(),
412 413 b'node': ctx.hex(),
413 414 b'parent': ctx.p1().hex(),
414 415 }),
415 416 }
416 417 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
417 418
418 419 params = {
419 420 b'diff_id': diff[b'id'],
420 421 b'name': b'local:commits',
421 422 b'data': templatefilters.json({
422 423 ctx.hex(): {
423 424 b'author': stringutil.person(ctx.user()),
424 425 b'authorEmail': stringutil.email(ctx.user()),
425 426 b'time': int(ctx.date()[0]),
426 427 b'commit': ctx.hex(),
427 428 b'parents': [ctx.p1().hex()],
428 429 b'branch': ctx.branch(),
429 430 },
430 431 }),
431 432 }
432 433 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
433 434
434 435 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
435 436 oldnode=None, olddiff=None, actions=None,
436 437 comment=None):
437 438 """create or update a Differential Revision
438 439
439 440 If revid is None, create a new Differential Revision, otherwise update
440 441 revid. If parentrevphid is not None, set it as a dependency.
441 442
442 443 If oldnode is not None, check if the patch content (without commit message
443 444 and metadata) has changed before creating another diff.
444 445
445 446 If actions is not None, they will be appended to the transaction.
446 447 """
447 448 repo = ctx.repo()
448 449 if oldnode:
449 450 diffopts = mdiff.diffopts(git=True, context=32767)
450 451 oldctx = repo.unfiltered()[oldnode]
451 452 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
452 453 else:
453 454 neednewdiff = True
454 455
455 456 transactions = []
456 457 if neednewdiff:
457 458 diff = creatediff(ctx)
458 459 transactions.append({b'type': b'update', b'value': diff[b'phid']})
459 460 if comment:
460 461 transactions.append({b'type': b'comment', b'value': comment})
461 462 else:
462 463 # Even if we don't need to upload a new diff because the patch content
463 464 # does not change, we might still need to update its metadata so
464 465 # pushers know the correct node metadata.
465 466 assert olddiff
466 467 diff = olddiff
467 468 writediffproperties(ctx, diff)
468 469
469 470 # Set the parent Revision every time, so commit re-ordering is picked-up
470 471 if parentrevphid:
471 472 transactions.append({b'type': b'parents.set',
472 473 b'value': [parentrevphid]})
473 474
474 475 if actions:
475 476 transactions += actions
476 477
477 478 # Parse commit message and update related fields.
478 479 desc = ctx.description()
479 480 info = callconduit(repo.ui, b'differential.parsecommitmessage',
480 481 {b'corpus': desc})
481 482 for k, v in info[b'fields'].items():
482 483 if k in [b'title', b'summary', b'testPlan']:
483 484 transactions.append({b'type': k, b'value': v})
484 485
485 486 params = {b'transactions': transactions}
486 487 if revid is not None:
487 488 # Update an existing Differential Revision
488 489 params[b'objectIdentifier'] = revid
489 490
490 491 revision = callconduit(repo.ui, b'differential.revision.edit', params)
491 492 if not revision:
492 493 raise error.Abort(_(b'cannot create revision for %s') % ctx)
493 494
494 495 return revision, diff
495 496
496 497 def userphids(repo, names):
497 498 """convert user names to PHIDs"""
498 499 names = [name.lower() for name in names]
499 500 query = {b'constraints': {b'usernames': names}}
500 501 result = callconduit(repo.ui, b'user.search', query)
501 502 # username not found is not an error of the API. So check if we have missed
502 503 # some names here.
503 504 data = result[b'data']
504 505 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
505 506 unresolved = set(names) - resolved
506 507 if unresolved:
507 508 raise error.Abort(_(b'unknown username: %s')
508 509 % b' '.join(sorted(unresolved)))
509 510 return [entry[b'phid'] for entry in data]
510 511
511 512 @vcrcommand(b'phabsend',
512 513 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
513 514 (b'', b'amend', True, _(b'update commit messages')),
514 515 (b'', b'reviewer', [], _(b'specify reviewers')),
515 516 (b'', b'blocker', [], _(b'specify blocking reviewers')),
516 517 (b'm', b'comment', b'',
517 518 _(b'add a comment to Revisions with new/updated Diffs')),
518 519 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
519 520 _(b'REV [OPTIONS]'),
520 521 helpcategory=command.CATEGORY_IMPORT_EXPORT)
521 522 def phabsend(ui, repo, *revs, **opts):
522 523 """upload changesets to Phabricator
523 524
524 525 If multiple revisions are specified, they will be sent as a stack
525 526 with a linear dependency relationship, using the order specified by the
526 527 revset.
527 528
528 529 For the first time uploading changesets, local tags will be created to
529 530 maintain the association. After the first time, phabsend will check
530 531 obsstore and tags information so it can figure out whether to update an
531 532 existing Differential Revision, or create a new one.
532 533
533 534 If --amend is set, update commit messages so they have the
534 535 ``Differential Revision`` URL, and remove related tags. This is similar to
535 536 what arcanist does, and is preferred in author-push workflows. Otherwise,
536 537 use local tags to record the ``Differential Revision`` association.
537 538
538 539 The --confirm option lets you confirm changesets before sending them. You
539 540 can also add the following to your configuration file to make it the default
540 541 behaviour::
541 542
542 543 [phabsend]
543 544 confirm = true
544 545
545 546 phabsend will check obsstore and the above association to decide whether to
546 547 update an existing Differential Revision, or create a new one.
547 548 """
548 549 opts = pycompat.byteskwargs(opts)
549 550 revs = list(revs) + opts.get(b'rev', [])
550 551 revs = scmutil.revrange(repo, revs)
551 552
552 553 if not revs:
553 554 raise error.Abort(_(b'phabsend requires at least one changeset'))
554 555 if opts.get(b'amend'):
555 556 cmdutil.checkunfinished(repo)
556 557
557 558 # {newnode: (oldnode, olddiff, olddrev}
558 559 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
559 560
560 561 confirm = ui.configbool(b'phabsend', b'confirm')
561 562 confirm |= bool(opts.get(b'confirm'))
562 563 if confirm:
563 564 confirmed = _confirmbeforesend(repo, revs, oldmap)
564 565 if not confirmed:
565 566 raise error.Abort(_(b'phabsend cancelled'))
566 567
567 568 actions = []
568 569 reviewers = opts.get(b'reviewer', [])
569 570 blockers = opts.get(b'blocker', [])
570 571 phids = []
571 572 if reviewers:
572 573 phids.extend(userphids(repo, reviewers))
573 574 if blockers:
574 575 phids.extend(map(
575 576 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
576 577 ))
577 578 if phids:
578 579 actions.append({b'type': b'reviewers.add', b'value': phids})
579 580
580 581 drevids = [] # [int]
581 582 diffmap = {} # {newnode: diff}
582 583
583 584 # Send patches one by one so we know their Differential Revision PHIDs and
584 585 # can provide dependency relationship
585 586 lastrevphid = None
586 587 for rev in revs:
587 588 ui.debug(b'sending rev %d\n' % rev)
588 589 ctx = repo[rev]
589 590
590 591 # Get Differential Revision ID
591 592 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
592 593 if oldnode != ctx.node() or opts.get(b'amend'):
593 594 # Create or update Differential Revision
594 595 revision, diff = createdifferentialrevision(
595 596 ctx, revid, lastrevphid, oldnode, olddiff, actions,
596 597 opts.get(b'comment'))
597 598 diffmap[ctx.node()] = diff
598 599 newrevid = int(revision[b'object'][b'id'])
599 600 newrevphid = revision[b'object'][b'phid']
600 601 if revid:
601 602 action = b'updated'
602 603 else:
603 604 action = b'created'
604 605
605 606 # Create a local tag to note the association, if commit message
606 607 # does not have it already
607 608 m = _differentialrevisiondescre.search(ctx.description())
608 609 if not m or int(m.group(r'id')) != newrevid:
609 610 tagname = b'D%d' % newrevid
610 611 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
611 612 date=None, local=True)
612 613 else:
613 614 # Nothing changed. But still set "newrevphid" so the next revision
614 615 # could depend on this one and "newrevid" for the summary line.
615 616 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
616 617 newrevid = revid
617 618 action = b'skipped'
618 619
619 620 actiondesc = ui.label(
620 621 {b'created': _(b'created'),
621 622 b'skipped': _(b'skipped'),
622 623 b'updated': _(b'updated')}[action],
623 624 b'phabricator.action.%s' % action)
624 625 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
625 626 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
626 627 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
627 628 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
628 629 desc))
629 630 drevids.append(newrevid)
630 631 lastrevphid = newrevphid
631 632
632 633 # Update commit messages and remove tags
633 634 if opts.get(b'amend'):
634 635 unfi = repo.unfiltered()
635 636 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
636 637 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
637 638 wnode = unfi[b'.'].node()
638 639 mapping = {} # {oldnode: [newnode]}
639 640 for i, rev in enumerate(revs):
640 641 old = unfi[rev]
641 642 drevid = drevids[i]
642 643 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
643 644 newdesc = getdescfromdrev(drev)
644 645 # Make sure the commit message contains "Differential Revision"
645 646 if old.description() != newdesc:
646 647 if old.phase() == phases.public:
647 648 ui.warn(_("warning: not updating public commit %s\n")
648 649 % scmutil.formatchangeid(old))
649 650 continue
650 651 parents = [
651 652 mapping.get(old.p1().node(), (old.p1(),))[0],
652 653 mapping.get(old.p2().node(), (old.p2(),))[0],
653 654 ]
654 655 new = context.metadataonlyctx(
655 656 repo, old, parents=parents, text=newdesc,
656 657 user=old.user(), date=old.date(), extra=old.extra())
657 658
658 659 newnode = new.commit()
659 660
660 661 mapping[old.node()] = [newnode]
661 662 # Update diff property
662 663 # If it fails just warn and keep going, otherwise the DREV
663 664 # associations will be lost
664 665 try:
665 666 writediffproperties(unfi[newnode], diffmap[old.node()])
666 667 except util.urlerr.urlerror:
667 668 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
668 669 # Remove the local tag since it's no longer necessary
669 670 tagname = b'D%d' % drevid
670 671 if tagname in repo.tags():
671 672 tags.tag(repo, tagname, nullid, message=None, user=None,
672 673 date=None, local=True)
673 674 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
674 675 if wnode in mapping:
675 676 unfi.setparents(mapping[wnode][0])
676 677
677 678 # Map from "hg:meta" keys to header understood by "hg import". The order is
678 679 # consistent with "hg export" output.
679 680 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
680 681 (b'branch', b'Branch'), (b'node', b'Node ID'),
681 682 (b'parent', b'Parent ')])
682 683
683 684 def _confirmbeforesend(repo, revs, oldmap):
684 685 url, token = readurltoken(repo.ui)
685 686 ui = repo.ui
686 687 for rev in revs:
687 688 ctx = repo[rev]
688 689 desc = ctx.description().splitlines()[0]
689 690 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
690 691 if drevid:
691 692 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
692 693 else:
693 694 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
694 695
695 696 ui.write(_(b'%s - %s: %s\n')
696 697 % (drevdesc,
697 698 ui.label(bytes(ctx), b'phabricator.node'),
698 699 ui.label(desc, b'phabricator.desc')))
699 700
700 701 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
701 702 b'$$ &Yes $$ &No') % url):
702 703 return False
703 704
704 705 return True
705 706
706 707 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
707 708 b'abandoned'}
708 709
709 710 def _getstatusname(drev):
710 711 """get normalized status name from a Differential Revision"""
711 712 return drev[b'statusName'].replace(b' ', b'').lower()
712 713
713 714 # Small language to specify differential revisions. Support symbols: (), :X,
714 715 # +, and -.
715 716
716 717 _elements = {
717 718 # token-type: binding-strength, primary, prefix, infix, suffix
718 719 b'(': (12, None, (b'group', 1, b')'), None, None),
719 720 b':': (8, None, (b'ancestors', 8), None, None),
720 721 b'&': (5, None, None, (b'and_', 5), None),
721 722 b'+': (4, None, None, (b'add', 4), None),
722 723 b'-': (4, None, None, (b'sub', 4), None),
723 724 b')': (0, None, None, None, None),
724 725 b'symbol': (0, b'symbol', None, None, None),
725 726 b'end': (0, None, None, None, None),
726 727 }
727 728
728 729 def _tokenize(text):
729 730 view = memoryview(text) # zero-copy slice
730 731 special = b'():+-& '
731 732 pos = 0
732 733 length = len(text)
733 734 while pos < length:
734 735 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
735 736 pycompat.iterbytestr(view[pos:])))
736 737 if symbol:
737 738 yield (b'symbol', symbol, pos)
738 739 pos += len(symbol)
739 740 else: # special char, ignore space
740 741 if text[pos] != b' ':
741 742 yield (text[pos], None, pos)
742 743 pos += 1
743 744 yield (b'end', None, pos)
744 745
745 746 def _parse(text):
746 747 tree, pos = parser.parser(_elements).parse(_tokenize(text))
747 748 if pos != len(text):
748 749 raise error.ParseError(b'invalid token', pos)
749 750 return tree
750 751
751 752 def _parsedrev(symbol):
752 753 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
753 754 if symbol.startswith(b'D') and symbol[1:].isdigit():
754 755 return int(symbol[1:])
755 756 if symbol.isdigit():
756 757 return int(symbol)
757 758
758 759 def _prefetchdrevs(tree):
759 760 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
760 761 drevs = set()
761 762 ancestordrevs = set()
762 763 op = tree[0]
763 764 if op == b'symbol':
764 765 r = _parsedrev(tree[1])
765 766 if r:
766 767 drevs.add(r)
767 768 elif op == b'ancestors':
768 769 r, a = _prefetchdrevs(tree[1])
769 770 drevs.update(r)
770 771 ancestordrevs.update(r)
771 772 ancestordrevs.update(a)
772 773 else:
773 774 for t in tree[1:]:
774 775 r, a = _prefetchdrevs(t)
775 776 drevs.update(r)
776 777 ancestordrevs.update(a)
777 778 return drevs, ancestordrevs
778 779
779 780 def querydrev(repo, spec):
780 781 """return a list of "Differential Revision" dicts
781 782
782 783 spec is a string using a simple query language, see docstring in phabread
783 784 for details.
784 785
785 786 A "Differential Revision dict" looks like:
786 787
787 788 {
788 789 "id": "2",
789 790 "phid": "PHID-DREV-672qvysjcczopag46qty",
790 791 "title": "example",
791 792 "uri": "https://phab.example.com/D2",
792 793 "dateCreated": "1499181406",
793 794 "dateModified": "1499182103",
794 795 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
795 796 "status": "0",
796 797 "statusName": "Needs Review",
797 798 "properties": [],
798 799 "branch": null,
799 800 "summary": "",
800 801 "testPlan": "",
801 802 "lineCount": "2",
802 803 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
803 804 "diffs": [
804 805 "3",
805 806 "4",
806 807 ],
807 808 "commits": [],
808 809 "reviewers": [],
809 810 "ccs": [],
810 811 "hashes": [],
811 812 "auxiliary": {
812 813 "phabricator:projects": [],
813 814 "phabricator:depends-on": [
814 815 "PHID-DREV-gbapp366kutjebt7agcd"
815 816 ]
816 817 },
817 818 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
818 819 "sourcePath": null
819 820 }
820 821 """
821 822 def fetch(params):
822 823 """params -> single drev or None"""
823 824 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
824 825 if key in prefetched:
825 826 return prefetched[key]
826 827 drevs = callconduit(repo.ui, b'differential.query', params)
827 828 # Fill prefetched with the result
828 829 for drev in drevs:
829 830 prefetched[drev[b'phid']] = drev
830 831 prefetched[int(drev[b'id'])] = drev
831 832 if key not in prefetched:
832 833 raise error.Abort(_(b'cannot get Differential Revision %r')
833 834 % params)
834 835 return prefetched[key]
835 836
836 837 def getstack(topdrevids):
837 838 """given a top, get a stack from the bottom, [id] -> [id]"""
838 839 visited = set()
839 840 result = []
840 841 queue = [{b'ids': [i]} for i in topdrevids]
841 842 while queue:
842 843 params = queue.pop()
843 844 drev = fetch(params)
844 845 if drev[b'id'] in visited:
845 846 continue
846 847 visited.add(drev[b'id'])
847 848 result.append(int(drev[b'id']))
848 849 auxiliary = drev.get(b'auxiliary', {})
849 850 depends = auxiliary.get(b'phabricator:depends-on', [])
850 851 for phid in depends:
851 852 queue.append({b'phids': [phid]})
852 853 result.reverse()
853 854 return smartset.baseset(result)
854 855
855 856 # Initialize prefetch cache
856 857 prefetched = {} # {id or phid: drev}
857 858
858 859 tree = _parse(spec)
859 860 drevs, ancestordrevs = _prefetchdrevs(tree)
860 861
861 862 # developer config: phabricator.batchsize
862 863 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
863 864
864 865 # Prefetch Differential Revisions in batch
865 866 tofetch = set(drevs)
866 867 for r in ancestordrevs:
867 868 tofetch.update(range(max(1, r - batchsize), r + 1))
868 869 if drevs:
869 870 fetch({b'ids': list(tofetch)})
870 871 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
871 872
872 873 # Walk through the tree, return smartsets
873 874 def walk(tree):
874 875 op = tree[0]
875 876 if op == b'symbol':
876 877 drev = _parsedrev(tree[1])
877 878 if drev:
878 879 return smartset.baseset([drev])
879 880 elif tree[1] in _knownstatusnames:
880 881 drevs = [r for r in validids
881 882 if _getstatusname(prefetched[r]) == tree[1]]
882 883 return smartset.baseset(drevs)
883 884 else:
884 885 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
885 886 elif op in {b'and_', b'add', b'sub'}:
886 887 assert len(tree) == 3
887 888 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
888 889 elif op == b'group':
889 890 return walk(tree[1])
890 891 elif op == b'ancestors':
891 892 return getstack(walk(tree[1]))
892 893 else:
893 894 raise error.ProgrammingError(b'illegal tree: %r' % tree)
894 895
895 896 return [prefetched[r] for r in walk(tree)]
896 897
897 898 def getdescfromdrev(drev):
898 899 """get description (commit message) from "Differential Revision"
899 900
900 901 This is similar to the differential.getcommitmessage API, but we only care
901 902 about a limited set of fields: title, summary, test plan, and URL.
902 903 """
903 904 title = drev[b'title']
904 905 summary = drev[b'summary'].rstrip()
905 906 testplan = drev[b'testPlan'].rstrip()
906 907 if testplan:
907 908 testplan = b'Test Plan:\n%s' % testplan
908 909 uri = b'Differential Revision: %s' % drev[b'uri']
909 910 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
910 911
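For a Differential Revision shaped like the querydrev example above, the
reconstructed message looks like this (field values are made up)::

    drev = {
        b'title': b'example',
        b'summary': b'a longer explanation\n',
        b'testPlan': b'ran test-phabricator.t\n',
        b'uri': b'https://phab.example.com/D2',
    }
    # getdescfromdrev(drev) returns:
    #
    #   example
    #
    #   a longer explanation
    #
    #   Test Plan:
    #   ran test-phabricator.t
    #
    #   Differential Revision: https://phab.example.com/D2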
911 912 def getdiffmeta(diff):
912 913 """get commit metadata (date, node, user, p1) from a diff object
913 914
914 915 The metadata could be "hg:meta", sent by phabsend, like:
915 916
916 917 "properties": {
917 918 "hg:meta": {
918 919 "date": "1499571514 25200",
919 920 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
920 921 "user": "Foo Bar <foo@example.com>",
921 922 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
922 923 }
923 924 }
924 925
925 926 Or converted from "local:commits", sent by "arc", like:
926 927
927 928 "properties": {
928 929 "local:commits": {
929 930 "98c08acae292b2faf60a279b4189beb6cff1414d": {
930 931 "author": "Foo Bar",
931 932 "time": 1499546314,
932 933 "branch": "default",
933 934 "tag": "",
934 935 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
935 936 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
936 937 "local": "1000",
937 938 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
938 939 "summary": "...",
939 940 "message": "...",
940 941 "authorEmail": "foo@example.com"
941 942 }
942 943 }
943 944 }
944 945
945 946 Note: metadata extracted from "local:commits" will lose time zone
946 947 information.
947 948 """
948 949 props = diff.get(b'properties') or {}
949 950 meta = props.get(b'hg:meta')
950 951 if not meta:
951 952 if props.get(b'local:commits'):
952 953 commit = sorted(props[b'local:commits'].values())[0]
953 954 meta = {}
954 955 if b'author' in commit and b'authorEmail' in commit:
955 956 meta[b'user'] = b'%s <%s>' % (commit[b'author'],
956 957 commit[b'authorEmail'])
957 958 if b'time' in commit:
958 959 meta[b'date'] = b'%d 0' % int(commit[b'time'])
959 960 if b'branch' in commit:
960 961 meta[b'branch'] = commit[b'branch']
961 962 node = commit.get(b'commit', commit.get(b'rev'))
962 963 if node:
963 964 meta[b'node'] = node
964 965 if len(commit.get(b'parents', ())) >= 1:
965 966 meta[b'parent'] = commit[b'parents'][0]
966 967 else:
967 968 meta = {}
968 969 if b'date' not in meta and b'dateCreated' in diff:
969 970 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
970 971 if b'branch' not in meta and diff.get(b'branch'):
971 972 meta[b'branch'] = diff[b'branch']
972 973 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
973 974 meta[b'parent'] = diff[b'sourceControlBaseRevision']
974 975 return meta
975 976
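Fed the "local:commits" example from the docstring above, the returned
metadata would look roughly like this (note the zeroed time zone)::

    {
        b'user': b'Foo Bar <foo@example.com>',
        b'date': b'1499546314 0',    # offset from "local:commits" is lost
        b'branch': b'default',
        b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
        b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
    }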
976 977 def readpatch(repo, drevs, write):
977 978 """generate plain-text patch readable by 'hg import'
978 979
979 980 write is usually ui.write. drevs is what "querydrev" returns, results of
980 981 "differential.query".
981 982 """
982 983 # Prefetch hg:meta property for all diffs
983 984 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
984 985 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
985 986
986 987 # Generate patch for each drev
987 988 for drev in drevs:
988 989 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
989 990
990 991 diffid = max(int(v) for v in drev[b'diffs'])
991 992 body = callconduit(repo.ui, b'differential.getrawdiff',
992 993 {b'diffID': diffid})
993 994 desc = getdescfromdrev(drev)
994 995 header = b'# HG changeset patch\n'
995 996
996 997 # Try to preserve metadata from hg:meta property. Write hg patch
997 998 # headers that can be read by the "import" command. See patchheadermap
998 999 # and extract in mercurial/patch.py for supported headers.
999 1000 meta = getdiffmeta(diffs[b'%d' % diffid])
1000 1001 for k in _metanamemap.keys():
1001 1002 if k in meta:
1002 1003 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1003 1004
1004 1005 content = b'%s%s\n%s' % (header, desc, body)
1005 1006 write(content)
1006 1007
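With an "hg:meta" property like the one shown in getdiffmeta's docstring, the
content assembled above comes out as (sketch; the trailing lines stand in for
the commit message and the raw diff)::

    # HG changeset patch
    # User Foo Bar <foo@example.com>
    # Date 1499571514 25200
    # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
    <commit message from getdescfromdrev>
    <raw diff from differential.getrawdiff>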
1007 1008 @vcrcommand(b'phabread',
1008 1009 [(b'', b'stack', False, _(b'read dependencies'))],
1009 1010 _(b'DREVSPEC [OPTIONS]'),
1010 1011 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1011 1012 def phabread(ui, repo, spec, **opts):
1012 1013 """print patches from Phabricator suitable for importing
1013 1014
1014 1015 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
1015 1016 the number ``123``. It could also have common operators like ``+``, ``-``,
1016 1017 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1017 1018 select a stack.
1018 1019
1019 1020 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1020 1021 could be used to filter patches by status. For performance reasons, they
1021 1022 only represent a subset of non-status selections and cannot be used alone.
1022 1023
1023 1024 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
1024 1025 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1025 1026 stack up to D9.
1026 1027
1027 1028 If --stack is given, follow dependency information and read all patches.
1028 1029 It is equivalent to the ``:`` operator.
1029 1030 """
1030 1031 opts = pycompat.byteskwargs(opts)
1031 1032 if opts.get(b'stack'):
1032 1033 spec = b':(%s)' % spec
1033 1034 drevs = querydrev(repo, spec)
1034 1035 readpatch(repo, drevs, ui.write)
1035 1036
1036 1037 @vcrcommand(b'phabupdate',
1037 1038 [(b'', b'accept', False, _(b'accept revisions')),
1038 1039 (b'', b'reject', False, _(b'reject revisions')),
1039 1040 (b'', b'abandon', False, _(b'abandon revisions')),
1040 1041 (b'', b'reclaim', False, _(b'reclaim revisions')),
1041 1042 (b'm', b'comment', b'', _(b'comment on the last revision')),
1042 1043 ], _(b'DREVSPEC [OPTIONS]'),
1043 1044 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1044 1045 def phabupdate(ui, repo, spec, **opts):
1045 1046 """update Differential Revision in batch
1046 1047
1047 1048 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1048 1049 """
1049 1050 opts = pycompat.byteskwargs(opts)
1050 1051 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1051 1052 if len(flags) > 1:
1052 1053 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1053 1054
1054 1055 actions = []
1055 1056 for f in flags:
1056 1057 actions.append({b'type': f, b'value': b'true'})
1057 1058
1058 1059 drevs = querydrev(repo, spec)
1059 1060 for i, drev in enumerate(drevs):
1060 1061 if i + 1 == len(drevs) and opts.get(b'comment'):
1061 1062 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1062 1063 if actions:
1063 1064 params = {b'objectIdentifier': drev[b'phid'],
1064 1065 b'transactions': actions}
1065 1066 callconduit(ui, b'differential.revision.edit', params)
1066 1067
1067 templatekeyword = registrar.templatekeyword()
1068
1069 @templatekeyword(b'phabreview', requires={b'ctx'})
1068 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1070 1069 def template_review(context, mapping):
1071 1070 """:phabreview: Object describing the review for this changeset.
1072 1071 Has attributes `url` and `id`.
1073 1072 """
1074 1073 ctx = context.resource(mapping, b'ctx')
1075 1074 m = _differentialrevisiondescre.search(ctx.description())
1076 1075 if m:
1077 1076 return templateutil.hybriddict({
1078 1077 b'url': m.group(r'url'),
1079 1078 b'id': b"D%s" % m.group(r'id'),
1080 1079 })
1081 1080 else:
1082 1081 tags = ctx.repo().nodetags(ctx.node())
1083 1082 for t in tags:
1084 1083 if _differentialrevisiontagre.match(t):
1085 1084 url = ctx.repo().ui.config(b'phabricator', b'url')
1086 1085 if not url.endswith(b'/'):
1087 1086 url += b'/'
1088 1087 url += t
1089 1088
1090 1089 return templateutil.hybriddict({
1091 1090 b'url': url,
1092 1091 b'id': t,
1093 1092 })
1094 1093 return None
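Since the keyword is now registered through the same helper, it stays
available to log templates once the extension is enabled; a hypothetical
invocation (not part of this change)::

    hg log -r . -T '{phabreview.url} {phabreview.id}\n'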