py3: fix phabricator's use of json.loads() for py3.5...
Ian Moody
r43317:0f90c2d2 default
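The change addresses a stdlib difference: json.loads() only accepts bytes from Python 3.6 onward, while Python 3.5 requires a str/unicode input, so the Conduit response body has to be decoded before parsing. A minimal standalone sketch of the pattern, using a plain .decode() where the extension uses its encoding.unifromlocal() helper:

    import json

    body = b'{"result": {"id": "2"}}'  # raw Conduit response body (bytes)

    # Python 3.5's json.loads() rejects bytes with a TypeError; decoding to a
    # unicode string first works on every supported Python version, which is
    # what the patch does via json.loads(encoding.unifromlocal(body)).
    parsed = json.loads(body.decode('utf-8'))
    assert parsed['result']['id'] == '2'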
@@ -1,1093 +1,1094 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import contextlib
45 45 import itertools
46 46 import json
47 47 import operator
48 48 import re
49 49
50 50 from mercurial.node import bin, nullid
51 51 from mercurial.i18n import _
52 52 from mercurial import (
53 53 cmdutil,
54 54 context,
55 55 encoding,
56 56 error,
57 57 exthelper,
58 58 httpconnection as httpconnectionmod,
59 59 mdiff,
60 60 obsutil,
61 61 parser,
62 62 patch,
63 63 phases,
64 64 pycompat,
65 65 scmutil,
66 66 smartset,
67 67 tags,
68 68 templatefilters,
69 69 templateutil,
70 70 url as urlmod,
71 71 util,
72 72 )
73 73 from mercurial.utils import (
74 74 procutil,
75 75 stringutil,
76 76 )
77 77
78 78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
79 79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
80 80 # be specifying the version(s) of Mercurial they are tested with, or
81 81 # leave the attribute unspecified.
82 82 testedwith = 'ships-with-hg-core'
83 83
84 84 eh = exthelper.exthelper()
85 85
86 86 cmdtable = eh.cmdtable
87 87 command = eh.command
88 88 configtable = eh.configtable
89 89 templatekeyword = eh.templatekeyword
90 90
91 91 # developer config: phabricator.batchsize
92 92 eh.configitem(b'phabricator', b'batchsize',
93 93 default=12,
94 94 )
95 95 eh.configitem(b'phabricator', b'callsign',
96 96 default=None,
97 97 )
98 98 eh.configitem(b'phabricator', b'curlcmd',
99 99 default=None,
100 100 )
101 101 # developer config: phabricator.repophid
102 102 eh.configitem(b'phabricator', b'repophid',
103 103 default=None,
104 104 )
105 105 eh.configitem(b'phabricator', b'url',
106 106 default=None,
107 107 )
108 108 eh.configitem(b'phabsend', b'confirm',
109 109 default=False,
110 110 )
111 111
112 112 colortable = {
113 113 b'phabricator.action.created': b'green',
114 114 b'phabricator.action.skipped': b'magenta',
115 115 b'phabricator.action.updated': b'magenta',
116 116 b'phabricator.desc': b'',
117 117 b'phabricator.drev': b'bold',
118 118 b'phabricator.node': b'',
119 119 }
120 120
121 121 _VCR_FLAGS = [
122 122 (b'', b'test-vcr', b'',
123 123 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
124 124 b', otherwise will mock all http requests using the specified vcr file.'
125 125 b' (ADVANCED)'
126 126 )),
127 127 ]
128 128
129 129 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
130 130 fullflags = flags + _VCR_FLAGS
131 131 def hgmatcher(r1, r2):
132 132 if r1.uri != r2.uri or r1.method != r2.method:
133 133 return False
134 134 r1params = r1.body.split(b'&')
135 135 r2params = r2.body.split(b'&')
136 136 return set(r1params) == set(r2params)
137 137
138 138 def sanitiserequest(request):
139 139 request.body = re.sub(
140 140 br'cli-[a-z0-9]+',
141 141 br'cli-hahayouwish',
142 142 request.body
143 143 )
144 144 return request
145 145
146 146 def sanitiseresponse(response):
147 147 if r'set-cookie' in response[r'headers']:
148 148 del response[r'headers'][r'set-cookie']
149 149 return response
150 150
151 151 def decorate(fn):
152 152 def inner(*args, **kwargs):
153 153 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
154 154 if cassette:
155 155 import hgdemandimport
156 156 with hgdemandimport.deactivated():
157 157 import vcr as vcrmod
158 158 import vcr.stubs as stubs
159 159 vcr = vcrmod.VCR(
160 160 serializer=r'json',
161 161 before_record_request=sanitiserequest,
162 162 before_record_response=sanitiseresponse,
163 163 custom_patches=[
164 164 (urlmod, r'httpconnection',
165 165 stubs.VCRHTTPConnection),
166 166 (urlmod, r'httpsconnection',
167 167 stubs.VCRHTTPSConnection),
168 168 ])
169 169 vcr.register_matcher(r'hgmatcher', hgmatcher)
170 170 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
171 171 return fn(*args, **kwargs)
172 172 return fn(*args, **kwargs)
173 173 inner.__name__ = fn.__name__
174 174 inner.__doc__ = fn.__doc__
175 175 return command(name, fullflags, spec, helpcategory=helpcategory,
176 176 optionalrepo=optionalrepo)(inner)
177 177 return decorate
178 178
179 179 def urlencodenested(params):
180 180 """like urlencode, but works with nested parameters.
181 181
182 182 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
183 183 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
184 184 urlencode. Note: the encoding is consistent with PHP's http_build_query.
185 185 """
186 186 flatparams = util.sortdict()
187 187 def process(prefix, obj):
188 188 if isinstance(obj, bool):
189 189 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
190 190 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
191 191 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
192 192 if items is None:
193 193 flatparams[prefix] = obj
194 194 else:
195 195 for k, v in items(obj):
196 196 if prefix:
197 197 process(b'%s[%s]' % (prefix, k), v)
198 198 else:
199 199 process(k, v)
200 200 process(b'', params)
201 201 return util.urlreq.urlencode(flatparams)
202 202
203 203 def readurltoken(ui):
204 204 """return conduit url, token and make sure they exist
205 205
206 206 Currently read from the [auth] config section. In the future, it might
207 207 make sense to read from .arcconfig and .arcrc as well.
208 208 """
209 209 url = ui.config(b'phabricator', b'url')
210 210 if not url:
211 211 raise error.Abort(_(b'config %s.%s is required')
212 212 % (b'phabricator', b'url'))
213 213
214 214 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
215 215 token = None
216 216
217 217 if res:
218 218 group, auth = res
219 219
220 220 ui.debug(b"using auth.%s.* for authentication\n" % group)
221 221
222 222 token = auth.get(b'phabtoken')
223 223
224 224 if not token:
225 225 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
226 226 % (url,))
227 227
228 228 return url, token
229 229
230 230 def callconduit(ui, name, params):
231 231 """call Conduit API, params is a dict. return json.loads result, or None"""
232 232 host, token = readurltoken(ui)
233 233 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
234 234 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
235 235 params = params.copy()
236 236 params[b'api.token'] = token
237 237 data = urlencodenested(params)
238 238 curlcmd = ui.config(b'phabricator', b'curlcmd')
239 239 if curlcmd:
240 240 sin, sout = procutil.popen2(b'%s -d @- %s'
241 241 % (curlcmd, procutil.shellquote(url)))
242 242 sin.write(data)
243 243 sin.close()
244 244 body = sout.read()
245 245 else:
246 246 urlopener = urlmod.opener(ui, authinfo)
247 247 request = util.urlreq.request(pycompat.strurl(url), data=data)
248 248 with contextlib.closing(urlopener.open(request)) as rsp:
249 249 body = rsp.read()
250 250 ui.debug(b'Conduit Response: %s\n' % body)
251 251 parsed = pycompat.rapply(
252 252 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
253 253 else x,
254 json.loads(body)
254 # json.loads only accepts bytes from py3.6+
255 json.loads(encoding.unifromlocal(body))
255 256 )
256 257 if parsed.get(b'error_code'):
257 258 msg = (_(b'Conduit Error (%s): %s')
258 259 % (parsed[b'error_code'], parsed[b'error_info']))
259 260 raise error.Abort(msg)
260 261 return parsed[b'result']
261 262
262 263 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
263 264 def debugcallconduit(ui, repo, name):
264 265 """call Conduit API
265 266
266 267 Call parameters are read from stdin as a JSON blob. Result will be written
267 268 to stdout as a JSON blob.
268 269 """
269 270 # json.loads only accepts bytes from 3.6+
270 271 rawparams = encoding.unifromlocal(ui.fin.read())
271 272 # json.loads only returns unicode strings
272 273 params = pycompat.rapply(lambda x:
273 274 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
274 275 json.loads(rawparams)
275 276 )
276 277 # json.dumps only accepts unicode strings
277 278 result = pycompat.rapply(lambda x:
278 279 encoding.unifromlocal(x) if isinstance(x, bytes) else x,
279 280 callconduit(ui, name, params)
280 281 )
281 282 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
282 283 ui.write(b'%s\n' % encoding.unitolocal(s))
283 284
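# Illustrative usage (invocation and parameter values assumed, not part of
# the upstream file): parameters are fed to debugcallconduit on stdin as a
# JSON blob and the Conduit result is printed to stdout as JSON, e.g.
#   $ echo '{"ids": [2]}' | hg debugcallconduit differential.query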
284 285 def getrepophid(repo):
285 286 """given callsign, return repository PHID or None"""
286 287 # developer config: phabricator.repophid
287 288 repophid = repo.ui.config(b'phabricator', b'repophid')
288 289 if repophid:
289 290 return repophid
290 291 callsign = repo.ui.config(b'phabricator', b'callsign')
291 292 if not callsign:
292 293 return None
293 294 query = callconduit(repo.ui, b'diffusion.repository.search',
294 295 {b'constraints': {b'callsigns': [callsign]}})
295 296 if len(query[b'data']) == 0:
296 297 return None
297 298 repophid = query[b'data'][0][b'phid']
298 299 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
299 300 return repophid
300 301
301 302 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
302 303 _differentialrevisiondescre = re.compile(
303 304 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
304 305
305 306 def getoldnodedrevmap(repo, nodelist):
306 307 """find previous nodes that has been sent to Phabricator
307 308
308 309 return {node: (oldnode, Differential diff, Differential Revision ID)}
309 310 for node in nodelist with known previous sent versions, or associated
310 311 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
311 312 be ``None``.
312 313
313 314 Examines commit messages like "Differential Revision:" to get the
314 315 association information.
315 316
316 317 If no such commit message line is found, examines all precursors and their
317 318 tags. Tags with a format like "D1234" are considered a match, and the node
318 319 with that tag and the number after "D" (e.g. 1234) will be returned.
319 320
320 321 The ``old node``, if not None, is guaranteed to be the last diff of the
321 322 corresponding Differential Revision, and to exist in the repo.
322 323 """
323 324 unfi = repo.unfiltered()
324 325 nodemap = unfi.changelog.nodemap
325 326
326 327 result = {} # {node: (oldnode?, lastdiff?, drev)}
327 328 toconfirm = {} # {node: (force, {precnode}, drev)}
328 329 for node in nodelist:
329 330 ctx = unfi[node]
330 331 # For tags like "D123", put them into "toconfirm" to verify later
331 332 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
332 333 for n in precnodes:
333 334 if n in nodemap:
334 335 for tag in unfi.nodetags(n):
335 336 m = _differentialrevisiontagre.match(tag)
336 337 if m:
337 338 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
338 339 continue
339 340
340 341 # Check commit message
341 342 m = _differentialrevisiondescre.search(ctx.description())
342 343 if m:
343 344 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
344 345
345 346 # Double-check that tags are genuine by collecting all old nodes from
346 347 # Phabricator, and expecting the precursors to overlap with them.
347 348 if toconfirm:
348 349 drevs = [drev for force, precs, drev in toconfirm.values()]
349 350 alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
350 351 {b'revisionIDs': drevs})
351 352 getnode = lambda d: bin(
352 353 getdiffmeta(d).get(b'node', b'')) or None
353 354 for newnode, (force, precset, drev) in toconfirm.items():
354 355 diffs = [d for d in alldiffs.values()
355 356 if int(d[b'revisionID']) == drev]
356 357
357 358 # "precursors" as known by Phabricator
358 359 phprecset = set(getnode(d) for d in diffs)
359 360
360 361 # Ignore if precursors (Phabricator and local repo) do not overlap,
361 362 # and force is not set (when commit message says nothing)
362 363 if not force and not bool(phprecset & precset):
363 364 tagname = b'D%d' % drev
364 365 tags.tag(repo, tagname, nullid, message=None, user=None,
365 366 date=None, local=True)
366 367 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
367 368 b'Differential history\n') % drev)
368 369 continue
369 370
370 371 # Find the last node using Phabricator metadata, and make sure it
371 372 # exists in the repo
372 373 oldnode = lastdiff = None
373 374 if diffs:
374 375 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
375 376 oldnode = getnode(lastdiff)
376 377 if oldnode and oldnode not in nodemap:
377 378 oldnode = None
378 379
379 380 result[newnode] = (oldnode, lastdiff, drev)
380 381
381 382 return result
382 383
383 384 def getdiff(ctx, diffopts):
384 385 """plain-text diff without header (user, commit message, etc)"""
385 386 output = util.stringio()
386 387 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
387 388 None, opts=diffopts):
388 389 output.write(chunk)
389 390 return output.getvalue()
390 391
391 392 def creatediff(ctx):
392 393 """create a Differential Diff"""
393 394 repo = ctx.repo()
394 395 repophid = getrepophid(repo)
395 396 # Create a "Differential Diff" via "differential.createrawdiff" API
396 397 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
397 398 if repophid:
398 399 params[b'repositoryPHID'] = repophid
399 400 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
400 401 if not diff:
401 402 raise error.Abort(_(b'cannot create diff for %s') % ctx)
402 403 return diff
403 404
404 405 def writediffproperties(ctx, diff):
405 406 """write metadata to diff so patches could be applied losslessly"""
406 407 params = {
407 408 b'diff_id': diff[b'id'],
408 409 b'name': b'hg:meta',
409 410 b'data': templatefilters.json({
410 411 b'user': ctx.user(),
411 412 b'date': b'%d %d' % ctx.date(),
412 413 b'branch': ctx.branch(),
413 414 b'node': ctx.hex(),
414 415 b'parent': ctx.p1().hex(),
415 416 }),
416 417 }
417 418 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
418 419
419 420 params = {
420 421 b'diff_id': diff[b'id'],
421 422 b'name': b'local:commits',
422 423 b'data': templatefilters.json({
423 424 ctx.hex(): {
424 425 b'author': stringutil.person(ctx.user()),
425 426 b'authorEmail': stringutil.email(ctx.user()),
426 427 b'time': int(ctx.date()[0]),
427 428 b'commit': ctx.hex(),
428 429 b'parents': [ctx.p1().hex()],
429 430 b'branch': ctx.branch(),
430 431 },
431 432 }),
432 433 }
433 434 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
434 435
435 436 def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
436 437 oldnode=None, olddiff=None, actions=None,
437 438 comment=None):
438 439 """create or update a Differential Revision
439 440
440 441 If revid is None, create a new Differential Revision, otherwise update
441 442 revid. If parentrevphid is not None, set it as a dependency.
442 443
443 444 If oldnode is not None, check if the patch content (without commit message
444 445 and metadata) has changed before creating another diff.
445 446
446 447 If actions is not None, they will be appended to the transaction.
447 448 """
448 449 repo = ctx.repo()
449 450 if oldnode:
450 451 diffopts = mdiff.diffopts(git=True, context=32767)
451 452 oldctx = repo.unfiltered()[oldnode]
452 453 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
453 454 else:
454 455 neednewdiff = True
455 456
456 457 transactions = []
457 458 if neednewdiff:
458 459 diff = creatediff(ctx)
459 460 transactions.append({b'type': b'update', b'value': diff[b'phid']})
460 461 if comment:
461 462 transactions.append({b'type': b'comment', b'value': comment})
462 463 else:
463 464 # Even if we don't need to upload a new diff because the patch content
464 465 # has not changed, we might still need to update its metadata so
465 466 # pushers can know the correct node metadata.
466 467 assert olddiff
467 468 diff = olddiff
468 469 writediffproperties(ctx, diff)
469 470
470 471 # Set the parent Revision every time, so commit re-ordering is picked-up
471 472 if parentrevphid:
472 473 transactions.append({b'type': b'parents.set',
473 474 b'value': [parentrevphid]})
474 475
475 476 if actions:
476 477 transactions += actions
477 478
478 479 # Parse commit message and update related fields.
479 480 desc = ctx.description()
480 481 info = callconduit(repo.ui, b'differential.parsecommitmessage',
481 482 {b'corpus': desc})
482 483 for k, v in info[b'fields'].items():
483 484 if k in [b'title', b'summary', b'testPlan']:
484 485 transactions.append({b'type': k, b'value': v})
485 486
486 487 params = {b'transactions': transactions}
487 488 if revid is not None:
488 489 # Update an existing Differential Revision
489 490 params[b'objectIdentifier'] = revid
490 491
491 492 revision = callconduit(repo.ui, b'differential.revision.edit', params)
492 493 if not revision:
493 494 raise error.Abort(_(b'cannot create revision for %s') % ctx)
494 495
495 496 return revision, diff
496 497
497 498 def userphids(repo, names):
498 499 """convert user names to PHIDs"""
499 500 names = [name.lower() for name in names]
500 501 query = {b'constraints': {b'usernames': names}}
501 502 result = callconduit(repo.ui, b'user.search', query)
502 503 # An unknown username is not an API error, so check whether we have missed
503 504 # any names here.
504 505 data = result[b'data']
505 506 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
506 507 unresolved = set(names) - resolved
507 508 if unresolved:
508 509 raise error.Abort(_(b'unknown username: %s')
509 510 % b' '.join(sorted(unresolved)))
510 511 return [entry[b'phid'] for entry in data]
511 512
512 513 @vcrcommand(b'phabsend',
513 514 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
514 515 (b'', b'amend', True, _(b'update commit messages')),
515 516 (b'', b'reviewer', [], _(b'specify reviewers')),
516 517 (b'', b'blocker', [], _(b'specify blocking reviewers')),
517 518 (b'm', b'comment', b'',
518 519 _(b'add a comment to Revisions with new/updated Diffs')),
519 520 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
520 521 _(b'REV [OPTIONS]'),
521 522 helpcategory=command.CATEGORY_IMPORT_EXPORT)
522 523 def phabsend(ui, repo, *revs, **opts):
523 524 """upload changesets to Phabricator
524 525
525 526 If there are multiple revisions specified, they will be sent as a stack
526 527 with a linear dependency relationship, using the order specified by the
527 528 revset.
528 529
529 530 The first time changesets are uploaded, local tags will be created to
530 531 maintain the association. After that, phabsend will check the
531 532 obsstore and tag information to figure out whether to update an
532 533 existing Differential Revision, or create a new one.
533 534
534 535 If --amend is set, update commit messages so they have the
535 536 ``Differential Revision`` URL, and remove the related tags. This is similar
536 537 to what arcanist does, and is preferred in author-push workflows. Otherwise,
537 538 local tags are used to record the ``Differential Revision`` association.
538 539
539 540 The --confirm option lets you confirm changesets before sending them. You
540 541 can also add the following to your configuration file to make it the default
541 542 behaviour::
542 543
543 544 [phabsend]
544 545 confirm = true
545 546
546 547 phabsend will check obsstore and the above association to decide whether to
547 548 update an existing Differential Revision, or create a new one.
548 549 """
549 550 opts = pycompat.byteskwargs(opts)
550 551 revs = list(revs) + opts.get(b'rev', [])
551 552 revs = scmutil.revrange(repo, revs)
552 553
553 554 if not revs:
554 555 raise error.Abort(_(b'phabsend requires at least one changeset'))
555 556 if opts.get(b'amend'):
556 557 cmdutil.checkunfinished(repo)
557 558
558 559 # {newnode: (oldnode, olddiff, olddrev}
559 560 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
560 561
561 562 confirm = ui.configbool(b'phabsend', b'confirm')
562 563 confirm |= bool(opts.get(b'confirm'))
563 564 if confirm:
564 565 confirmed = _confirmbeforesend(repo, revs, oldmap)
565 566 if not confirmed:
566 567 raise error.Abort(_(b'phabsend cancelled'))
567 568
568 569 actions = []
569 570 reviewers = opts.get(b'reviewer', [])
570 571 blockers = opts.get(b'blocker', [])
571 572 phids = []
572 573 if reviewers:
573 574 phids.extend(userphids(repo, reviewers))
574 575 if blockers:
575 576 phids.extend(map(
576 577 lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
577 578 ))
578 579 if phids:
579 580 actions.append({b'type': b'reviewers.add', b'value': phids})
580 581
581 582 drevids = [] # [int]
582 583 diffmap = {} # {newnode: diff}
583 584
584 585 # Send patches one by one so we know their Differential Revision PHIDs and
585 586 # can provide dependency relationship
586 587 lastrevphid = None
587 588 for rev in revs:
588 589 ui.debug(b'sending rev %d\n' % rev)
589 590 ctx = repo[rev]
590 591
591 592 # Get Differential Revision ID
592 593 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
593 594 if oldnode != ctx.node() or opts.get(b'amend'):
594 595 # Create or update Differential Revision
595 596 revision, diff = createdifferentialrevision(
596 597 ctx, revid, lastrevphid, oldnode, olddiff, actions,
597 598 opts.get(b'comment'))
598 599 diffmap[ctx.node()] = diff
599 600 newrevid = int(revision[b'object'][b'id'])
600 601 newrevphid = revision[b'object'][b'phid']
601 602 if revid:
602 603 action = b'updated'
603 604 else:
604 605 action = b'created'
605 606
606 607 # Create a local tag to note the association, if commit message
607 608 # does not have it already
608 609 m = _differentialrevisiondescre.search(ctx.description())
609 610 if not m or int(m.group(r'id')) != newrevid:
610 611 tagname = b'D%d' % newrevid
611 612 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
612 613 date=None, local=True)
613 614 else:
614 615 # Nothing changed. But still set "newrevphid" so the next revision
615 616 # could depend on this one and "newrevid" for the summary line.
616 617 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
617 618 newrevid = revid
618 619 action = b'skipped'
619 620
620 621 actiondesc = ui.label(
621 622 {b'created': _(b'created'),
622 623 b'skipped': _(b'skipped'),
623 624 b'updated': _(b'updated')}[action],
624 625 b'phabricator.action.%s' % action)
625 626 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
626 627 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
627 628 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
628 629 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
629 630 desc))
630 631 drevids.append(newrevid)
631 632 lastrevphid = newrevphid
632 633
633 634 # Update commit messages and remove tags
634 635 if opts.get(b'amend'):
635 636 unfi = repo.unfiltered()
636 637 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
637 638 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
638 639 wnode = unfi[b'.'].node()
639 640 mapping = {} # {oldnode: [newnode]}
640 641 for i, rev in enumerate(revs):
641 642 old = unfi[rev]
642 643 drevid = drevids[i]
643 644 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
644 645 newdesc = getdescfromdrev(drev)
645 646 # Make sure the commit message contains "Differential Revision"
646 647 if old.description() != newdesc:
647 648 if old.phase() == phases.public:
648 649 ui.warn(_("warning: not updating public commit %s\n")
649 650 % scmutil.formatchangeid(old))
650 651 continue
651 652 parents = [
652 653 mapping.get(old.p1().node(), (old.p1(),))[0],
653 654 mapping.get(old.p2().node(), (old.p2(),))[0],
654 655 ]
655 656 new = context.metadataonlyctx(
656 657 repo, old, parents=parents, text=newdesc,
657 658 user=old.user(), date=old.date(), extra=old.extra())
658 659
659 660 newnode = new.commit()
660 661
661 662 mapping[old.node()] = [newnode]
662 663 # Update diff property
663 664 # If it fails just warn and keep going, otherwise the DREV
664 665 # associations will be lost
665 666 try:
666 667 writediffproperties(unfi[newnode], diffmap[old.node()])
667 668 except util.urlerr.urlerror:
668 669 ui.warn(b'Failed to update metadata for D%s\n' % drevid)
669 670 # Remove local tags since they are no longer necessary
670 671 tagname = b'D%d' % drevid
671 672 if tagname in repo.tags():
672 673 tags.tag(repo, tagname, nullid, message=None, user=None,
673 674 date=None, local=True)
674 675 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
675 676 if wnode in mapping:
676 677 unfi.setparents(mapping[wnode][0])
677 678
678 679 # Map from "hg:meta" keys to headers understood by "hg import". The order is
679 680 # consistent with "hg export" output.
680 681 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
681 682 (b'branch', b'Branch'), (b'node', b'Node ID'),
682 683 (b'parent', b'Parent ')])
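# Illustrative example (sample values taken from the getdiffmeta() docstring
# below, not part of the upstream file): with that "hg:meta" property,
# readpatch() emits a header block like:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16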
683 684
684 685 def _confirmbeforesend(repo, revs, oldmap):
685 686 url, token = readurltoken(repo.ui)
686 687 ui = repo.ui
687 688 for rev in revs:
688 689 ctx = repo[rev]
689 690 desc = ctx.description().splitlines()[0]
690 691 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
691 692 if drevid:
692 693 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
693 694 else:
694 695 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
695 696
696 697 ui.write(_(b'%s - %s: %s\n')
697 698 % (drevdesc,
698 699 ui.label(bytes(ctx), b'phabricator.node'),
699 700 ui.label(desc, b'phabricator.desc')))
700 701
701 702 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
702 703 b'$$ &Yes $$ &No') % url):
703 704 return False
704 705
705 706 return True
706 707
707 708 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
708 709 b'abandoned'}
709 710
710 711 def _getstatusname(drev):
711 712 """get normalized status name from a Differential Revision"""
712 713 return drev[b'statusName'].replace(b' ', b'').lower()
713 714
714 715 # Small language to specify Differential Revisions. Supported symbols: (), :X,
715 716 # +, -, and &.
716 717
717 718 _elements = {
718 719 # token-type: binding-strength, primary, prefix, infix, suffix
719 720 b'(': (12, None, (b'group', 1, b')'), None, None),
720 721 b':': (8, None, (b'ancestors', 8), None, None),
721 722 b'&': (5, None, None, (b'and_', 5), None),
722 723 b'+': (4, None, None, (b'add', 4), None),
723 724 b'-': (4, None, None, (b'sub', 4), None),
724 725 b')': (0, None, None, None, None),
725 726 b'symbol': (0, b'symbol', None, None, None),
726 727 b'end': (0, None, None, None, None),
727 728 }
728 729
729 730 def _tokenize(text):
730 731 view = memoryview(text) # zero-copy slice
731 732 special = b'():+-& '
732 733 pos = 0
733 734 length = len(text)
734 735 while pos < length:
735 736 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
736 737 pycompat.iterbytestr(view[pos:])))
737 738 if symbol:
738 739 yield (b'symbol', symbol, pos)
739 740 pos += len(symbol)
740 741 else: # special char, ignore space
741 742 if text[pos] != b' ':
742 743 yield (text[pos], None, pos)
743 744 pos += 1
744 745 yield (b'end', None, pos)
745 746
746 747 def _parse(text):
747 748 tree, pos = parser.parser(_elements).parse(_tokenize(text))
748 749 if pos != len(text):
749 750 raise error.ParseError(b'invalid token', pos)
750 751 return tree
751 752
752 753 def _parsedrev(symbol):
753 754 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
754 755 if symbol.startswith(b'D') and symbol[1:].isdigit():
755 756 return int(symbol[1:])
756 757 if symbol.isdigit():
757 758 return int(symbol)
758 759
759 760 def _prefetchdrevs(tree):
760 761 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
761 762 drevs = set()
762 763 ancestordrevs = set()
763 764 op = tree[0]
764 765 if op == b'symbol':
765 766 r = _parsedrev(tree[1])
766 767 if r:
767 768 drevs.add(r)
768 769 elif op == b'ancestors':
769 770 r, a = _prefetchdrevs(tree[1])
770 771 drevs.update(r)
771 772 ancestordrevs.update(r)
772 773 ancestordrevs.update(a)
773 774 else:
774 775 for t in tree[1:]:
775 776 r, a = _prefetchdrevs(t)
776 777 drevs.update(r)
777 778 ancestordrevs.update(a)
778 779 return drevs, ancestordrevs
779 780
780 781 def querydrev(repo, spec):
781 782 """return a list of "Differential Revision" dicts
782 783
783 784 spec is a string using a simple query language, see docstring in phabread
784 785 for details.
785 786
786 787 A "Differential Revision dict" looks like:
787 788
788 789 {
789 790 "id": "2",
790 791 "phid": "PHID-DREV-672qvysjcczopag46qty",
791 792 "title": "example",
792 793 "uri": "https://phab.example.com/D2",
793 794 "dateCreated": "1499181406",
794 795 "dateModified": "1499182103",
795 796 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
796 797 "status": "0",
797 798 "statusName": "Needs Review",
798 799 "properties": [],
799 800 "branch": null,
800 801 "summary": "",
801 802 "testPlan": "",
802 803 "lineCount": "2",
803 804 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
804 805 "diffs": [
805 806 "3",
806 807 "4",
807 808 ],
808 809 "commits": [],
809 810 "reviewers": [],
810 811 "ccs": [],
811 812 "hashes": [],
812 813 "auxiliary": {
813 814 "phabricator:projects": [],
814 815 "phabricator:depends-on": [
815 816 "PHID-DREV-gbapp366kutjebt7agcd"
816 817 ]
817 818 },
818 819 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
819 820 "sourcePath": null
820 821 }
821 822 """
822 823 def fetch(params):
823 824 """params -> single drev or None"""
824 825 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
825 826 if key in prefetched:
826 827 return prefetched[key]
827 828 drevs = callconduit(repo.ui, b'differential.query', params)
828 829 # Fill prefetched with the result
829 830 for drev in drevs:
830 831 prefetched[drev[b'phid']] = drev
831 832 prefetched[int(drev[b'id'])] = drev
832 833 if key not in prefetched:
833 834 raise error.Abort(_(b'cannot get Differential Revision %r')
834 835 % params)
835 836 return prefetched[key]
836 837
837 838 def getstack(topdrevids):
838 839 """given a top, get a stack from the bottom, [id] -> [id]"""
839 840 visited = set()
840 841 result = []
841 842 queue = [{b'ids': [i]} for i in topdrevids]
842 843 while queue:
843 844 params = queue.pop()
844 845 drev = fetch(params)
845 846 if drev[b'id'] in visited:
846 847 continue
847 848 visited.add(drev[b'id'])
848 849 result.append(int(drev[b'id']))
849 850 auxiliary = drev.get(b'auxiliary', {})
850 851 depends = auxiliary.get(b'phabricator:depends-on', [])
851 852 for phid in depends:
852 853 queue.append({b'phids': [phid]})
853 854 result.reverse()
854 855 return smartset.baseset(result)
855 856
856 857 # Initialize prefetch cache
857 858 prefetched = {} # {id or phid: drev}
858 859
859 860 tree = _parse(spec)
860 861 drevs, ancestordrevs = _prefetchdrevs(tree)
861 862
862 863 # developer config: phabricator.batchsize
863 864 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
864 865
865 866 # Prefetch Differential Revisions in batch
866 867 tofetch = set(drevs)
867 868 for r in ancestordrevs:
868 869 tofetch.update(range(max(1, r - batchsize), r + 1))
869 870 if drevs:
870 871 fetch({b'ids': list(tofetch)})
871 872 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
872 873
873 874 # Walk through the tree, return smartsets
874 875 def walk(tree):
875 876 op = tree[0]
876 877 if op == b'symbol':
877 878 drev = _parsedrev(tree[1])
878 879 if drev:
879 880 return smartset.baseset([drev])
880 881 elif tree[1] in _knownstatusnames:
881 882 drevs = [r for r in validids
882 883 if _getstatusname(prefetched[r]) == tree[1]]
883 884 return smartset.baseset(drevs)
884 885 else:
885 886 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
886 887 elif op in {b'and_', b'add', b'sub'}:
887 888 assert len(tree) == 3
888 889 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
889 890 elif op == b'group':
890 891 return walk(tree[1])
891 892 elif op == b'ancestors':
892 893 return getstack(walk(tree[1]))
893 894 else:
894 895 raise error.ProgrammingError(b'illegal tree: %r' % tree)
895 896
896 897 return [prefetched[r] for r in walk(tree)]
897 898
898 899 def getdescfromdrev(drev):
899 900 """get description (commit message) from "Differential Revision"
900 901
901 902 This is similar to the differential.getcommitmessage API, but we only care
902 903 about a limited set of fields: title, summary, test plan, and URL.
903 904 """
904 905 title = drev[b'title']
905 906 summary = drev[b'summary'].rstrip()
906 907 testplan = drev[b'testPlan'].rstrip()
907 908 if testplan:
908 909 testplan = b'Test Plan:\n%s' % testplan
909 910 uri = b'Differential Revision: %s' % drev[b'uri']
910 911 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
911 912
912 913 def getdiffmeta(diff):
913 914 """get commit metadata (date, node, user, p1) from a diff object
914 915
915 916 The metadata could be "hg:meta", sent by phabsend, like:
916 917
917 918 "properties": {
918 919 "hg:meta": {
919 920 "date": "1499571514 25200",
920 921 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
921 922 "user": "Foo Bar <foo@example.com>",
922 923 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
923 924 }
924 925 }
925 926
926 927 Or converted from "local:commits", sent by "arc", like:
927 928
928 929 "properties": {
929 930 "local:commits": {
930 931 "98c08acae292b2faf60a279b4189beb6cff1414d": {
931 932 "author": "Foo Bar",
932 933 "time": 1499546314,
933 934 "branch": "default",
934 935 "tag": "",
935 936 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
936 937 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
937 938 "local": "1000",
938 939 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
939 940 "summary": "...",
940 941 "message": "...",
941 942 "authorEmail": "foo@example.com"
942 943 }
943 944 }
944 945 }
945 946
946 947 Note: metadata extracted from "local:commits" will lose time zone
947 948 information.
948 949 """
949 950 props = diff.get(b'properties') or {}
950 951 meta = props.get(b'hg:meta')
951 952 if not meta:
952 953 if props.get(b'local:commits'):
953 954 commit = sorted(props[b'local:commits'].values())[0]
954 955 meta = {}
955 956 if b'author' in commit and b'authorEmail' in commit:
956 957 meta[b'user'] = b'%s <%s>' % (commit[b'author'],
957 958 commit[b'authorEmail'])
958 959 if b'time' in commit:
959 960 meta[b'date'] = b'%d 0' % int(commit[b'time'])
960 961 if b'branch' in commit:
961 962 meta[b'branch'] = commit[b'branch']
962 963 node = commit.get(b'commit', commit.get(b'rev'))
963 964 if node:
964 965 meta[b'node'] = node
965 966 if len(commit.get(b'parents', ())) >= 1:
966 967 meta[b'parent'] = commit[b'parents'][0]
967 968 else:
968 969 meta = {}
969 970 if b'date' not in meta and b'dateCreated' in diff:
970 971 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
971 972 if b'branch' not in meta and diff.get(b'branch'):
972 973 meta[b'branch'] = diff[b'branch']
973 974 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
974 975 meta[b'parent'] = diff[b'sourceControlBaseRevision']
975 976 return meta
976 977
977 978 def readpatch(repo, drevs, write):
978 979 """generate plain-text patch readable by 'hg import'
979 980
980 981 write is usually ui.write. drevs is what "querydrev" returns, results of
981 982 "differential.query".
982 983 """
983 984 # Prefetch hg:meta property for all diffs
984 985 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
985 986 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
986 987
987 988 # Generate patch for each drev
988 989 for drev in drevs:
989 990 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
990 991
991 992 diffid = max(int(v) for v in drev[b'diffs'])
992 993 body = callconduit(repo.ui, b'differential.getrawdiff',
993 994 {b'diffID': diffid})
994 995 desc = getdescfromdrev(drev)
995 996 header = b'# HG changeset patch\n'
996 997
997 998 # Try to preserve metadata from hg:meta property. Write hg patch
998 999 # headers that can be read by the "import" command. See patchheadermap
999 1000 # and extract in mercurial/patch.py for supported headers.
1000 1001 meta = getdiffmeta(diffs[b'%d' % diffid])
1001 1002 for k in _metanamemap.keys():
1002 1003 if k in meta:
1003 1004 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1004 1005
1005 1006 content = b'%s%s\n%s' % (header, desc, body)
1006 1007 write(content)
1007 1008
1008 1009 @vcrcommand(b'phabread',
1009 1010 [(b'', b'stack', False, _(b'read dependencies'))],
1010 1011 _(b'DREVSPEC [OPTIONS]'),
1011 1012 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1012 1013 def phabread(ui, repo, spec, **opts):
1013 1014 """print patches from Phabricator suitable for importing
1014 1015
1015 1016 DREVSPEC can be a Differential Revision identity, like ``D123``, or just
1016 1017 the number ``123``. It can also use common operators like ``+``, ``-``,
1017 1018 ``&``, ``(``, ``)`` for complex queries. The prefix ``:`` can be used to
1018 1019 select a stack.
1019 1020
1020 1021 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1021 1022 could be used to filter patches by status. For performance reasons, they
1022 1023 only represent a subset of non-status selections and cannot be used alone.
1023 1024
1024 1025 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1025 1026 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1026 1027 stack up to D9.
1027 1028
1028 1029 If --stack is given, follow dependency information and read all patches.
1029 1030 It is equivalent to the ``:`` operator.
1030 1031 """
1031 1032 opts = pycompat.byteskwargs(opts)
1032 1033 if opts.get(b'stack'):
1033 1034 spec = b':(%s)' % spec
1034 1035 drevs = querydrev(repo, spec)
1035 1036 readpatch(repo, drevs, ui.write)
1036 1037
1037 1038 @vcrcommand(b'phabupdate',
1038 1039 [(b'', b'accept', False, _(b'accept revisions')),
1039 1040 (b'', b'reject', False, _(b'reject revisions')),
1040 1041 (b'', b'abandon', False, _(b'abandon revisions')),
1041 1042 (b'', b'reclaim', False, _(b'reclaim revisions')),
1042 1043 (b'm', b'comment', b'', _(b'comment on the last revision')),
1043 1044 ], _(b'DREVSPEC [OPTIONS]'),
1044 1045 helpcategory=command.CATEGORY_IMPORT_EXPORT)
1045 1046 def phabupdate(ui, repo, spec, **opts):
1046 1047 """update Differential Revision in batch
1047 1048
1048 1049 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1049 1050 """
1050 1051 opts = pycompat.byteskwargs(opts)
1051 1052 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1052 1053 if len(flags) > 1:
1053 1054 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1054 1055
1055 1056 actions = []
1056 1057 for f in flags:
1057 1058 actions.append({b'type': f, b'value': b'true'})
1058 1059
1059 1060 drevs = querydrev(repo, spec)
1060 1061 for i, drev in enumerate(drevs):
1061 1062 if i + 1 == len(drevs) and opts.get(b'comment'):
1062 1063 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1063 1064 if actions:
1064 1065 params = {b'objectIdentifier': drev[b'phid'],
1065 1066 b'transactions': actions}
1066 1067 callconduit(ui, b'differential.revision.edit', params)
1067 1068
1068 1069 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1069 1070 def template_review(context, mapping):
1070 1071 """:phabreview: Object describing the review for this changeset.
1071 1072 Has attributes `url` and `id`.
1072 1073 """
1073 1074 ctx = context.resource(mapping, b'ctx')
1074 1075 m = _differentialrevisiondescre.search(ctx.description())
1075 1076 if m:
1076 1077 return templateutil.hybriddict({
1077 1078 b'url': m.group(r'url'),
1078 1079 b'id': b"D%s" % m.group(r'id'),
1079 1080 })
1080 1081 else:
1081 1082 tags = ctx.repo().nodetags(ctx.node())
1082 1083 for t in tags:
1083 1084 if _differentialrevisiontagre.match(t):
1084 1085 url = ctx.repo().ui.config(b'phabricator', b'url')
1085 1086 if not url.endswith(b'/'):
1086 1087 url += b'/'
1087 1088 url += t
1088 1089
1089 1090 return templateutil.hybriddict({
1090 1091 b'url': url,
1091 1092 b'id': t,
1092 1093 })
1093 1094 return None
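# Example template usage (illustrative invocation, not part of the upstream
# file): print the Differential Revision URL for the working-directory parent:
#   $ hg log -r . -T '{phabreview.url}\n'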