##// END OF EJS Templates
phabricator: change conduit data format to match arcanist...
Ian Moody -
r43555:f5aa4a53 default
parent child Browse files
Show More
@@ -1,1600 +1,1607 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 46 import hashlib
47 47 import itertools
48 48 import json
49 49 import mimetypes
50 50 import operator
51 51 import re
52 52
53 53 from mercurial.node import bin, nullid
54 54 from mercurial.i18n import _
55 55 from mercurial.pycompat import getattr
56 56 from mercurial.thirdparty import attr
57 57 from mercurial import (
58 58 cmdutil,
59 59 context,
60 60 encoding,
61 61 error,
62 62 exthelper,
63 63 httpconnection as httpconnectionmod,
64 64 match,
65 65 mdiff,
66 66 obsutil,
67 67 parser,
68 68 patch,
69 69 phases,
70 70 pycompat,
71 71 scmutil,
72 72 smartset,
73 73 tags,
74 74 templatefilters,
75 75 templateutil,
76 76 url as urlmod,
77 77 util,
78 78 )
79 79 from mercurial.utils import (
80 80 procutil,
81 81 stringutil,
82 82 )
83 83
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Collects the command/config/templatekeyword registrations exported below.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# Repository callsign on the Phabricator server (see module docstring).
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# Optional external curl command used instead of the builtin HTTP library.
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# Base URL of the Phabricator instance.
eh.configitem(
    b'phabricator', b'url', default=None,
)
# Ask for confirmation before sending (also exposed as phabsend --confirm).
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used when writing command output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every command wrapped by vcrcommand(); lets tests
# record/replay conduit HTTP traffic via the vcr library.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139 139
140 140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """register a command whose HTTP traffic can be recorded/replayed

    Works like ``command()`` but appends ``_VCR_FLAGS``.  When the wrapped
    command is run with ``--test-vcr PATH``, the vcr library records conduit
    HTTP requests into PATH (if it does not exist) or mocks all HTTP requests
    from the transcript stored there.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Requests match when URI and method agree and the form parameters
        # are the same as a set (order-insensitive).
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Scrub the real conduit API token out of recorded requests.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies so no credentials land in the transcript.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr needs real imports; disable demandimport while loading
                # it and patch urlmod's connection classes for interception.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/registration.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
205 205
206 206
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # booleans become PHP-style b'true'/b'false' strings first, which
        # also keeps them out of the exact-type dispatch below
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            # leaf value: record it under the accumulated bracketed key
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
232 232
233 233
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises ``error.Abort`` if either the URL or the token is missing.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Find the [auth] group matching this URL and read its phabtoken entry.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
262 262
263 263
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    Raises ``error.Abort`` when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    # Match the request format arcanist uses: the API token travels inside
    # the JSON-encoded "params" field as a "__conduit__" member, not as a
    # top-level api.token form field.
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # pipe the urlencoded body through the user-configured curl command
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # convert every unicode string in the decoded JSON back to local bytes
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
300 307
301 308
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings, so convert back out of bytes
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
325 332
326 333
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # resolve the callsign to a repository PHID via conduit
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # cache the answer in config so the early return above fires next time
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
346 353
347 354
# Matches a whole local tag name like "D123" (no leading zeroes).
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>" line in a commit message,
# capturing the URL and the trailing revision number ("id").
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
352 359
353 360
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this "continue" only advances the
                        # inner tag loop, so the commit-message check below
                        # can still overwrite this entry -- confirm intended.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: trust the commit message even without tag overlap
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # re-tag to nullid, which removes the stale local tag
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
444 451
445 452
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    repo = ctx.repo()
    chunks = patch.diffui(
        repo, ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # concatenate the diff chunks, discarding their ui labels
    return b''.join(chunk for chunk, _label in chunks)
454 461
455 462
class DiffChangeType(object):
    """Numeric codes for the kind of change to a file, sent to Phabricator."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
465 472
466 473
class DiffFileType(object):
    """Numeric codes for a file's content type, sent to Phabricator."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
471 478
472 479
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # hunk body text (filled in by maketext())
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
486 493
487 494
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # destination paths for moves/copies originating at this file
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate every "new:*" metadata entry under an "old:*" key"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the parent version's unix file mode"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new version's unix file mode"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk, folding its line counts into this change"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
527 534
528 535
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change dict}, populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """add a phabchange, keyed by its currentPath"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
555 562
556 563
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # a huge context so each hunk effectively carries the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is presumably the "@@ ..." hunk header -- confirm; only
        # the body lines go into the corpus
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # count added/deleted lines for this hunk via diffstat
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
585 592
586 593
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # ask the server which byte ranges it still needs
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            # the server already has this range
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
613 620
614 621
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; raises ``error.Abort`` if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID from allocate: single-request upload of inline base64
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # allocate handed back a PHID, so send the content in chunks
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
650 657
651 658
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # Content is identical (e.g. mode-only or rename-only change).
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
673 680
674 681
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # tag images specially -- presumably so the web UI renders them
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
687 694
688 695
# Copied from mercurial/patch.py
# Maps a context flag (symlink / exec / regular) to its git mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
691 698
692 699
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each removed file becomes a DELETE phabchange whose old mode comes from
    the first parent; text files also get their deletion hunks attached.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # look the file up in the first parent once instead of twice
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not fctx.isbinary():
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
705 712
706 713
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        # only send mode properties when the mode actually changed
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary():
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
725 732
726 733
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: mutates ``removed`` (move sources are taken out of it).
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source file is gone: this add is really a move
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a second copy of an already-moved source: mark MULTICOPY
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary():
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # emit the accumulated COPY_AWAY/MOVE_AWAY counterpart changes last
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
790 797
791 798
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # a huge context so each hunk effectively carries the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, diffopts)}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
804 811
805 812
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # "hg:meta": the fields needed to reconstruct the changeset exactly
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    # "local:commits": per-commit author/parents info keyed by hex node --
    # presumably consumed by Phabricator's commit display; confirm
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
840 847
841 848
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns ``(revision, diff)``; raises ``error.Abort`` when the server
    returns no revision.
    """
    repo = ctx.repo()
    if oldnode:
        # compare patch text with the previously-sent version to decide
        # whether a fresh diff upload is needed at all
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
911 918
912 919
def userphids(repo, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API silently drops unknown usernames rather than erroring, so
    # detect any name that did not come back in the result set ourselves.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
928 935
929 936
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line ORs with the [phabsend] confirm config.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker actions are shared by every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" syntax of the
        # reviewers.add transaction.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per revision: "D123 - created - 1:abcdef: desc".
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite the changeset, remapping parents that were
                    # themselves rewritten earlier in this loop.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1138 1145
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: the trailing space in b'Parent ' is intentional; it matches the
# "# Parent  <node>" header emitted by "hg export".
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1150 1157
1151 1158
def _confirmbeforesend(repo, revs, oldmap):
    """Print a summary of the changesets about to be sent and prompt.

    Returns True if the user confirms sending, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # A known drevid means this changeset updates an existing Revision;
        # otherwise a new one will be created.
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns the 0-based index of the chosen answer, so
    # "Yes" (index 0) is falsy and means "go ahead".
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1179 1186
1180 1187
# Normalized status names (see _getstatusname) that may be used as symbols
# in the DREVSPEC query language to filter revisions by status.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1188 1195
1189 1196
1190 1197 def _getstatusname(drev):
1191 1198 """get normalized status name from a Differential Revision"""
1192 1199 return drev[b'statusName'].replace(b' ', b'').lower()
1193 1200
1194 1201
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1209 1216
1210 1217
def _tokenize(text):
    """Yield (token-type, value, position) triples for a DREVSPEC bytestring.

    Symbols yield (b'symbol', <bytes>, pos); special characters yield
    (<char>, None, pos); spaces are skipped; a final (b'end', None, pos)
    terminates the stream.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily consume a run of non-special bytes as one symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice instead of indexing: on Python 3, indexing bytes yields
            # an int, so b'...'[pos] != b' ' would always be true and the
            # yielded token type would be an int instead of a bytestring.
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1230 1237
1231 1238
def _parse(text):
    """Parse a DREVSPEC bytestring into a parse tree.

    Raises error.ParseError if the whole input cannot be consumed.
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1237 1244
1238 1245
1239 1246 def _parsedrev(symbol):
1240 1247 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1241 1248 if symbol.startswith(b'D') and symbol[1:].isdigit():
1242 1249 return int(symbol[1:])
1243 1250 if symbol.isdigit():
1244 1251 return int(symbol)
1245 1252
1246 1253
1247 1254 def _prefetchdrevs(tree):
1248 1255 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1249 1256 drevs = set()
1250 1257 ancestordrevs = set()
1251 1258 op = tree[0]
1252 1259 if op == b'symbol':
1253 1260 r = _parsedrev(tree[1])
1254 1261 if r:
1255 1262 drevs.add(r)
1256 1263 elif op == b'ancestors':
1257 1264 r, a = _prefetchdrevs(tree[1])
1258 1265 drevs.update(r)
1259 1266 ancestordrevs.update(r)
1260 1267 ancestordrevs.update(a)
1261 1268 else:
1262 1269 for t in tree[1:]:
1263 1270 r, a = _prefetchdrevs(t)
1264 1271 drevs.update(r)
1265 1272 ancestordrevs.update(a)
1266 1273 return drevs, ancestordrevs
1267 1274
1268 1275
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None

        Consults (and fills) the shared "prefetched" cache, so a drev is
        fetched from the server at most once per querydrev call.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by PHID and by
        # integer id so either lookup style hits the cache.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Walks "phabricator:depends-on" edges depth-first, then reverses so
        the result is ordered bottom (oldest dependency) to top.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        # Heuristic: ancestors of Dr are likely within [r - batchsize, r].
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the prefetched ids only; they cannot
                # be used standalone (see phabread docstring).
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1391 1398
1392 1399
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Drop empty sections so no double blank lines appear in the message.
    return b'\n\n'.join(s for s in sections if s)
1406 1413
1407 1414
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # There is normally one commit per diff; pick a deterministic
            # one if there are several.  Sort the node-hash keys rather
            # than the commit dicts themselves: comparing dicts raises
            # TypeError on Python 3.
            commits = props[b'local:commits']
            commit = commits[sorted(commits)[0]]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc only sends a unix timestamp; the offset is lost.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fill anything still missing from fields of the diff object itself.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1474 1481
1475 1482
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Prefetch hg:meta property for the latest diff of every drev at once
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    alldiffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate one patch per drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(alldiffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[k], meta[k]))
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1507 1514
1508 1515
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the ancestors (":") operator.
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1539 1546
1540 1547
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    # The status flags are mutually exclusive.
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # Attach the comment (if any) only to the last selected revision.
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1577 1584
1578 1585
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the URL embedded in the description by amend-style workflows.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    # Otherwise fall back to a local D<N> tag left by phabsend.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = repo.ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now