phabricator: switch to the creatediff endpoint...
Ian Moody
r43556:af067d29 default
@@ -1,1607 +1,1622 @@
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changesets from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
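# Illustrative setup (a sketch, not part of this change): the [phabricator]
# and [auth] settings documented above take effect once the extension itself
# is enabled, e.g.:
#
#   [extensions]
#   phabricator =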
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 46 import hashlib
47 47 import itertools
48 48 import json
49 49 import mimetypes
50 50 import operator
51 51 import re
52 52
53 53 from mercurial.node import bin, nullid
54 54 from mercurial.i18n import _
55 55 from mercurial.pycompat import getattr
56 56 from mercurial.thirdparty import attr
57 57 from mercurial import (
58 58 cmdutil,
59 59 context,
60 60 encoding,
61 61 error,
62 62 exthelper,
63 63 httpconnection as httpconnectionmod,
64 64 match,
65 65 mdiff,
66 66 obsutil,
67 67 parser,
68 68 patch,
69 69 phases,
70 70 pycompat,
71 71 scmutil,
72 72 smartset,
73 73 tags,
74 74 templatefilters,
75 75 templateutil,
76 76 url as urlmod,
77 77 util,
78 78 )
79 79 from mercurial.utils import (
80 80 procutil,
81 81 stringutil,
82 82 )
83 83
84 84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 86 # be specifying the version(s) of Mercurial they are tested with, or
87 87 # leave the attribute unspecified.
88 88 testedwith = b'ships-with-hg-core'
89 89
90 90 eh = exthelper.exthelper()
91 91
92 92 cmdtable = eh.cmdtable
93 93 command = eh.command
94 94 configtable = eh.configtable
95 95 templatekeyword = eh.templatekeyword
96 96
97 97 # developer config: phabricator.batchsize
98 98 eh.configitem(
99 99 b'phabricator', b'batchsize', default=12,
100 100 )
101 101 eh.configitem(
102 102 b'phabricator', b'callsign', default=None,
103 103 )
104 104 eh.configitem(
105 105 b'phabricator', b'curlcmd', default=None,
106 106 )
107 107 # developer config: phabricator.repophid
108 108 eh.configitem(
109 109 b'phabricator', b'repophid', default=None,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'url', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabsend', b'confirm', default=False,
116 116 )
117 117
118 118 colortable = {
119 119 b'phabricator.action.created': b'green',
120 120 b'phabricator.action.skipped': b'magenta',
121 121 b'phabricator.action.updated': b'magenta',
122 122 b'phabricator.desc': b'',
123 123 b'phabricator.drev': b'bold',
124 124 b'phabricator.node': b'',
125 125 }
126 126
127 127 _VCR_FLAGS = [
128 128 (
129 129 b'',
130 130 b'test-vcr',
131 131 b'',
132 132 _(
133 133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 134 b', otherwise will mock all http requests using the specified vcr file.'
135 135 b' (ADVANCED)'
136 136 ),
137 137 ),
138 138 ]
139 139
140 140
141 141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
142 142 fullflags = flags + _VCR_FLAGS
143 143
144 144 def hgmatcher(r1, r2):
145 145 if r1.uri != r2.uri or r1.method != r2.method:
146 146 return False
147 147 r1params = r1.body.split(b'&')
148 148 r2params = r2.body.split(b'&')
149 149 return set(r1params) == set(r2params)
150 150
151 151 def sanitiserequest(request):
152 152 request.body = re.sub(
153 153 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
154 154 )
155 155 return request
156 156
157 157 def sanitiseresponse(response):
158 158 if r'set-cookie' in response[r'headers']:
159 159 del response[r'headers'][r'set-cookie']
160 160 return response
161 161
162 162 def decorate(fn):
163 163 def inner(*args, **kwargs):
164 164 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
165 165 if cassette:
166 166 import hgdemandimport
167 167
168 168 with hgdemandimport.deactivated():
169 169 import vcr as vcrmod
170 170 import vcr.stubs as stubs
171 171
172 172 vcr = vcrmod.VCR(
173 173 serializer=r'json',
174 174 before_record_request=sanitiserequest,
175 175 before_record_response=sanitiseresponse,
176 176 custom_patches=[
177 177 (
178 178 urlmod,
179 179 r'httpconnection',
180 180 stubs.VCRHTTPConnection,
181 181 ),
182 182 (
183 183 urlmod,
184 184 r'httpsconnection',
185 185 stubs.VCRHTTPSConnection,
186 186 ),
187 187 ],
188 188 )
189 189 vcr.register_matcher(r'hgmatcher', hgmatcher)
190 190 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
191 191 return fn(*args, **kwargs)
192 192 return fn(*args, **kwargs)
193 193
194 194 inner.__name__ = fn.__name__
195 195 inner.__doc__ = fn.__doc__
196 196 return command(
197 197 name,
198 198 fullflags,
199 199 spec,
200 200 helpcategory=helpcategory,
201 201 optionalrepo=optionalrepo,
202 202 )(inner)
203 203
204 204 return decorate
205 205
206 206
207 207 def urlencodenested(params):
208 208 """like urlencode, but works with nested parameters.
209 209
210 210 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
211 211 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
212 212 urlencode. Note: the encoding is consistent with PHP's http_build_query.
213 213 """
214 214 flatparams = util.sortdict()
215 215
216 216 def process(prefix, obj):
217 217 if isinstance(obj, bool):
218 218 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
219 219 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
220 220 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
221 221 if items is None:
222 222 flatparams[prefix] = obj
223 223 else:
224 224 for k, v in items(obj):
225 225 if prefix:
226 226 process(b'%s[%s]' % (prefix, k), v)
227 227 else:
228 228 process(k, v)
229 229
230 230 process(b'', params)
231 231 return util.urlreq.urlencode(flatparams)
232 232
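# Illustrative usage (not part of this change): the docstring's input flattens
# and urlencodes to PHP-style bracketed keys, e.g.
#
#   urlencodenested({b'a': [b'b', b'c'], b'd': {b'e': b'f'}})
#   # -> 'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'  (i.e. a[0]=b, a[1]=c, d[e]=f)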
233 233
234 234 def readurltoken(ui):
235 235 """return conduit url, token and make sure they exist
236 236
237 237 Currently read from [auth] config section. In the future, it might
238 238 make sense to read from .arcconfig and .arcrc as well.
239 239 """
240 240 url = ui.config(b'phabricator', b'url')
241 241 if not url:
242 242 raise error.Abort(
243 243 _(b'config %s.%s is required') % (b'phabricator', b'url')
244 244 )
245 245
246 246 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
247 247 token = None
248 248
249 249 if res:
250 250 group, auth = res
251 251
252 252 ui.debug(b"using auth.%s.* for authentication\n" % group)
253 253
254 254 token = auth.get(b'phabtoken')
255 255
256 256 if not token:
257 257 raise error.Abort(
258 258 _(b'Can\'t find conduit token associated to %s') % (url,)
259 259 )
260 260
261 261 return url, token
262 262
263 263
264 264 def callconduit(ui, name, params):
265 265 """call Conduit API, params is a dict. return json.loads result, or None"""
266 266 host, token = readurltoken(ui)
267 267 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
268 268 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
269 269 params = params.copy()
270 270 params[b'__conduit__'] = {
271 271 b'token': token,
272 272 }
273 273 rawdata = {
274 274 b'params': templatefilters.json(params),
275 275 b'output': b'json',
276 276 b'__conduit__': 1,
277 277 }
278 278 data = urlencodenested(rawdata)
279 279 curlcmd = ui.config(b'phabricator', b'curlcmd')
280 280 if curlcmd:
281 281 sin, sout = procutil.popen2(
282 282 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
283 283 )
284 284 sin.write(data)
285 285 sin.close()
286 286 body = sout.read()
287 287 else:
288 288 urlopener = urlmod.opener(ui, authinfo)
289 289 request = util.urlreq.request(pycompat.strurl(url), data=data)
290 290 with contextlib.closing(urlopener.open(request)) as rsp:
291 291 body = rsp.read()
292 292 ui.debug(b'Conduit Response: %s\n' % body)
293 293 parsed = pycompat.rapply(
294 294 lambda x: encoding.unitolocal(x)
295 295 if isinstance(x, pycompat.unicode)
296 296 else x,
297 297 # json.loads only accepts bytes from py3.6+
298 298 json.loads(encoding.unifromlocal(body)),
299 299 )
300 300 if parsed.get(b'error_code'):
301 301 msg = _(b'Conduit Error (%s): %s') % (
302 302 parsed[b'error_code'],
303 303 parsed[b'error_info'],
304 304 )
305 305 raise error.Abort(msg)
306 306 return parsed[b'result']
307 307
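# Illustrative usage (not part of this change), assuming phabricator.url and
# an [auth] token are configured:
#
#   result = callconduit(ui, b'conduit.ping', {})
#   # ``result`` is the decoded "result" field of the Conduit response, with
#   # unicode strings converted back to local byte strings.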
308 308
309 309 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
310 310 def debugcallconduit(ui, repo, name):
311 311 """call Conduit API
312 312
313 313 Call parameters are read from stdin as a JSON blob. Result will be written
314 314 to stdout as a JSON blob.
315 315 """
316 316 # json.loads only accepts bytes from 3.6+
317 317 rawparams = encoding.unifromlocal(ui.fin.read())
318 318 # json.loads only returns unicode strings
319 319 params = pycompat.rapply(
320 320 lambda x: encoding.unitolocal(x)
321 321 if isinstance(x, pycompat.unicode)
322 322 else x,
323 323 json.loads(rawparams),
324 324 )
325 325 # json.dumps only accepts unicode strings
326 326 result = pycompat.rapply(
327 327 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
328 328 callconduit(ui, name, params),
329 329 )
330 330 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
331 331 ui.write(b'%s\n' % encoding.unitolocal(s))
332 332
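# Illustrative shell usage (not part of this change), with the same
# configuration assumptions as above:
#
#   $ echo '{}' | hg debugcallconduit conduit.ping
#   # prints the server's reply as a JSON blob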
333 333
334 334 def getrepophid(repo):
335 335 """given callsign, return repository PHID or None"""
336 336 # developer config: phabricator.repophid
337 337 repophid = repo.ui.config(b'phabricator', b'repophid')
338 338 if repophid:
339 339 return repophid
340 340 callsign = repo.ui.config(b'phabricator', b'callsign')
341 341 if not callsign:
342 342 return None
343 343 query = callconduit(
344 344 repo.ui,
345 345 b'diffusion.repository.search',
346 346 {b'constraints': {b'callsigns': [callsign]}},
347 347 )
348 348 if len(query[b'data']) == 0:
349 349 return None
350 350 repophid = query[b'data'][0][b'phid']
351 351 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
352 352 return repophid
353 353
354 354
355 355 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
356 356 _differentialrevisiondescre = re.compile(
357 357 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
358 358 )
359 359
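# Illustrative matches (not part of this change): the tag regex matches local
# tags such as b'D1234'; the description regex matches a commit message line
# such as
#
#   Differential Revision: https://phab.example.com/D1234
#
# capturing the full URL as the 'url' group and b'1234' as the 'id' group.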
360 360
361 361 def getoldnodedrevmap(repo, nodelist):
362 362 """find previous nodes that has been sent to Phabricator
363 363
364 364 return {node: (oldnode, Differential diff, Differential Revision ID)}
365 365 for node in nodelist with known previous sent versions, or associated
366 366 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
367 367 be ``None``.
368 368
369 369 Examines commit messages like "Differential Revision:" to get the
370 370 association information.
371 371
372 372 If no such commit message line is found, examines all precursors and their
373 373 tags. Tags with a format like "D1234" are considered a match, and the node
374 374 with that tag, along with the number after "D" (ex. 1234), will be returned.
375 375
376 376 The ``old node``, if not None, is guaranteed to be the last diff of
377 377 the corresponding Differential Revision, and to exist in the repo.
378 378 """
379 379 unfi = repo.unfiltered()
380 380 nodemap = unfi.changelog.nodemap
381 381
382 382 result = {} # {node: (oldnode?, lastdiff?, drev)}
383 383 toconfirm = {} # {node: (force, {precnode}, drev)}
384 384 for node in nodelist:
385 385 ctx = unfi[node]
386 386 # For tags like "D123", put them into "toconfirm" to verify later
387 387 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
388 388 for n in precnodes:
389 389 if n in nodemap:
390 390 for tag in unfi.nodetags(n):
391 391 m = _differentialrevisiontagre.match(tag)
392 392 if m:
393 393 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
394 394 continue
395 395
396 396 # Check commit message
397 397 m = _differentialrevisiondescre.search(ctx.description())
398 398 if m:
399 399 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
400 400
401 401 # Double check if tags are genuine by collecting all old nodes from
402 402 # Phabricator, and expecting the precursors to overlap with them.
403 403 if toconfirm:
404 404 drevs = [drev for force, precs, drev in toconfirm.values()]
405 405 alldiffs = callconduit(
406 406 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
407 407 )
408 408 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
409 409 for newnode, (force, precset, drev) in toconfirm.items():
410 410 diffs = [
411 411 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
412 412 ]
413 413
414 414 # "precursors" as known by Phabricator
415 415 phprecset = set(getnode(d) for d in diffs)
416 416
417 417 # Ignore if precursors (Phabricator and local repo) do not overlap,
418 418 # and force is not set (when commit message says nothing)
419 419 if not force and not bool(phprecset & precset):
420 420 tagname = b'D%d' % drev
421 421 tags.tag(
422 422 repo,
423 423 tagname,
424 424 nullid,
425 425 message=None,
426 426 user=None,
427 427 date=None,
428 428 local=True,
429 429 )
430 430 unfi.ui.warn(
431 431 _(
432 432 b'D%s: local tag removed - does not match '
433 433 b'Differential history\n'
434 434 )
435 435 % drev
436 436 )
437 437 continue
438 438
439 439 # Find the last node using Phabricator metadata, and make sure it
440 440 # exists in the repo
441 441 oldnode = lastdiff = None
442 442 if diffs:
443 443 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
444 444 oldnode = getnode(lastdiff)
445 445 if oldnode and oldnode not in nodemap:
446 446 oldnode = None
447 447
448 448 result[newnode] = (oldnode, lastdiff, drev)
449 449
450 450 return result
451 451
452 452
453 453 def getdiff(ctx, diffopts):
454 454 """plain-text diff without header (user, commit message, etc)"""
455 455 output = util.stringio()
456 456 for chunk, _label in patch.diffui(
457 457 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
458 458 ):
459 459 output.write(chunk)
460 460 return output.getvalue()
461 461
462 462
463 463 class DiffChangeType(object):
464 464 ADD = 1
465 465 CHANGE = 2
466 466 DELETE = 3
467 467 MOVE_AWAY = 4
468 468 COPY_AWAY = 5
469 469 MOVE_HERE = 6
470 470 COPY_HERE = 7
471 471 MULTICOPY = 8
472 472
473 473
474 474 class DiffFileType(object):
475 475 TEXT = 1
476 476 IMAGE = 2
477 477 BINARY = 3
478 478
479 479
480 480 @attr.s
481 481 class phabhunk(dict):
482 482 """Represents a Differential hunk, which is owned by a Differential change
483 483 """
484 484
485 485 oldOffset = attr.ib(default=0) # camelcase-required
486 486 oldLength = attr.ib(default=0) # camelcase-required
487 487 newOffset = attr.ib(default=0) # camelcase-required
488 488 newLength = attr.ib(default=0) # camelcase-required
489 489 corpus = attr.ib(default='')
490 490 # These get added to the phabchange's equivalents
491 491 addLines = attr.ib(default=0) # camelcase-required
492 492 delLines = attr.ib(default=0) # camelcase-required
493 493
494 494
495 495 @attr.s
496 496 class phabchange(object):
497 497 """Represents a Differential change, owns Differential hunks and owned by a
498 498 Differential diff. Each one represents one file in a diff.
499 499 """
500 500
501 501 currentPath = attr.ib(default=None) # camelcase-required
502 502 oldPath = attr.ib(default=None) # camelcase-required
503 503 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
504 504 metadata = attr.ib(default=attr.Factory(dict))
505 505 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
506 506 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
507 507 type = attr.ib(default=DiffChangeType.CHANGE)
508 508 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
509 509 commitHash = attr.ib(default=None) # camelcase-required
510 510 addLines = attr.ib(default=0) # camelcase-required
511 511 delLines = attr.ib(default=0) # camelcase-required
512 512 hunks = attr.ib(default=attr.Factory(list))
513 513
514 514 def copynewmetadatatoold(self):
515 515 for key in list(self.metadata.keys()):
516 516 newkey = key.replace(b'new:', b'old:')
517 517 self.metadata[newkey] = self.metadata[key]
518 518
519 519 def addoldmode(self, value):
520 520 self.oldProperties[b'unix:filemode'] = value
521 521
522 522 def addnewmode(self, value):
523 523 self.newProperties[b'unix:filemode'] = value
524 524
525 525 def addhunk(self, hunk):
526 526 if not isinstance(hunk, phabhunk):
527 527 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
528 528 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
529 529 # It's useful to include these stats since the Phab web UI shows them,
530 530 # and uses them to estimate how large a change a Revision is. Also used
531 531 # in email subjects for the [+++--] bit.
532 532 self.addLines += hunk.addLines
533 533 self.delLines += hunk.delLines
534 534
535 535
536 536 @attr.s
537 537 class phabdiff(object):
538 538 """Represents a Differential diff, owns Differential changes. Corresponds
539 539 to a commit.
540 540 """
541 541
542 542 # Doesn't seem to be any reason to send this (output of uname -n)
543 543 sourceMachine = attr.ib(default=b'') # camelcase-required
544 544 sourcePath = attr.ib(default=b'/') # camelcase-required
545 545 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
546 546 sourceControlPath = attr.ib(default=b'/') # camelcase-required
547 547 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
548 548 branch = attr.ib(default=b'default')
549 549 bookmark = attr.ib(default=None)
550 550 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
551 551 lintStatus = attr.ib(default=b'none') # camelcase-required
552 552 unitStatus = attr.ib(default=b'none') # camelcase-required
553 553 changes = attr.ib(default=attr.Factory(dict))
554 554 repositoryPHID = attr.ib(default=None) # camelcase-required
555 555
556 556 def addchange(self, change):
557 557 if not isinstance(change, phabchange):
558 558 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
559 559 self.changes[change.currentPath] = pycompat.byteskwargs(
560 560 attr.asdict(change)
561 561 )
562 562
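# A minimal sketch (illustrative, not part of this change) of how the three
# classes above nest when building the payload for "differential.creatediff":
# hunks go into a change, changes go into a diff keyed by path.
#
#   hunk = phabhunk(oldOffset=1, oldLength=1, newOffset=1, newLength=2,
#                   corpus=b'-old line\n+new line\n+added line\n',
#                   addLines=2, delLines=1)
#   change = phabchange(currentPath=b'foo.txt', oldPath=b'foo.txt')
#   change.addhunk(hunk)
#   pdiff = phabdiff(sourceControlBaseRevision=b'0' * 40, branch=b'default')
#   pdiff.addchange(change)
#   # pycompat.byteskwargs(attr.asdict(pdiff)) is the dict creatediff() sends.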
563 563
564 564 def maketext(pchange, ctx, fname):
565 565 """populate the phabchange for a text file"""
566 566 repo = ctx.repo()
567 567 fmatcher = match.exact([fname])
568 568 diffopts = mdiff.diffopts(git=True, context=32767)
569 569 _pfctx, _fctx, header, fhunks = next(
570 570 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
571 571 )
572 572
573 573 for fhunk in fhunks:
574 574 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
575 575 corpus = b''.join(lines[1:])
576 576 shunk = list(header)
577 577 shunk.extend(lines)
578 578 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
579 579 patch.diffstatdata(util.iterlines(shunk))
580 580 )
581 581 pchange.addhunk(
582 582 phabhunk(
583 583 oldOffset,
584 584 oldLength,
585 585 newOffset,
586 586 newLength,
587 587 corpus,
588 588 addLines,
589 589 delLines,
590 590 )
591 591 )
592 592
593 593
594 594 def uploadchunks(fctx, fphid):
595 595 """upload large binary files as separate chunks.
596 596 Phab requests chunking for files over 8MiB, and splits them into 4MiB chunks
597 597 """
598 598 ui = fctx.repo().ui
599 599 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
600 600 progress = ui.makeprogress(
601 601 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
602 602 )
603 603 for chunk in chunks:
604 604 progress.increment()
605 605 if chunk[b'complete']:
606 606 continue
607 607 bstart = int(chunk[b'byteStart'])
608 608 bend = int(chunk[b'byteEnd'])
609 609 callconduit(
610 610 ui,
611 611 b'file.uploadchunk',
612 612 {
613 613 b'filePHID': fphid,
614 614 b'byteStart': bstart,
615 615 b'data': base64.b64encode(fctx.data()[bstart:bend]),
616 616 b'dataEncoding': b'base64',
617 617 },
618 618 )
619 619 progress.complete()
620 620
621 621
622 622 def uploadfile(fctx):
623 623 """upload binary files to Phabricator"""
624 624 repo = fctx.repo()
625 625 ui = repo.ui
626 626 fname = fctx.path()
627 627 size = fctx.size()
628 628 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
629 629
630 630 # an allocate call is required first to see if an upload is even required
631 631 # (Phab might already have it) and to determine if chunking is needed
632 632 allocateparams = {
633 633 b'name': fname,
634 634 b'contentLength': size,
635 635 b'contentHash': fhash,
636 636 }
637 637 filealloc = callconduit(ui, b'file.allocate', allocateparams)
638 638 fphid = filealloc[b'filePHID']
639 639
640 640 if filealloc[b'upload']:
641 641 ui.write(_(b'uploading %s\n') % bytes(fctx))
642 642 if not fphid:
643 643 uploadparams = {
644 644 b'name': fname,
645 645 b'data_base64': base64.b64encode(fctx.data()),
646 646 }
647 647 fphid = callconduit(ui, b'file.upload', uploadparams)
648 648 else:
649 649 uploadchunks(fctx, fphid)
650 650 else:
651 651 ui.debug(b'server already has %s\n' % bytes(fctx))
652 652
653 653 if not fphid:
654 654 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
655 655
656 656 return fphid
657 657
658 658
659 659 def addoldbinary(pchange, fctx, originalfname):
660 660 """add the metadata for the previous version of a binary file to the
661 661 phabchange for the new version
662 662 """
663 663 oldfctx = fctx.p1()[originalfname]
664 664 if fctx.cmp(oldfctx):
665 665 # Files differ, add the old one
666 666 pchange.metadata[b'old:file:size'] = oldfctx.size()
667 667 mimeguess, _enc = mimetypes.guess_type(
668 668 encoding.unifromlocal(oldfctx.path())
669 669 )
670 670 if mimeguess:
671 671 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
672 672 mimeguess
673 673 )
674 674 fphid = uploadfile(oldfctx)
675 675 pchange.metadata[b'old:binary-phid'] = fphid
676 676 else:
677 677 # If it's left as IMAGE/BINARY the web UI might try to display it
678 678 pchange.fileType = DiffFileType.TEXT
679 679 pchange.copynewmetadatatoold()
680 680
681 681
682 682 def makebinary(pchange, fctx):
683 683 """populate the phabchange for a binary file"""
684 684 pchange.fileType = DiffFileType.BINARY
685 685 fphid = uploadfile(fctx)
686 686 pchange.metadata[b'new:binary-phid'] = fphid
687 687 pchange.metadata[b'new:file:size'] = fctx.size()
688 688 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
689 689 if mimeguess:
690 690 mimeguess = pycompat.bytestr(mimeguess)
691 691 pchange.metadata[b'new:file:mime-type'] = mimeguess
692 692 if mimeguess.startswith(b'image/'):
693 693 pchange.fileType = DiffFileType.IMAGE
694 694
695 695
696 696 # Copied from mercurial/patch.py
697 697 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
698 698
699 699
700 700 def addremoved(pdiff, ctx, removed):
701 701 """add removed files to the phabdiff. Shouldn't include moves"""
702 702 for fname in removed:
703 703 pchange = phabchange(
704 704 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
705 705 )
706 706 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
707 707 fctx = ctx.p1()[fname]
708 708 if not fctx.isbinary():
709 709 maketext(pchange, ctx, fname)
710 710
711 711 pdiff.addchange(pchange)
712 712
713 713
714 714 def addmodified(pdiff, ctx, modified):
715 715 """add modified files to the phabdiff"""
716 716 for fname in modified:
717 717 fctx = ctx[fname]
718 718 pchange = phabchange(currentPath=fname, oldPath=fname)
719 719 filemode = gitmode[ctx[fname].flags()]
720 720 originalmode = gitmode[ctx.p1()[fname].flags()]
721 721 if filemode != originalmode:
722 722 pchange.addoldmode(originalmode)
723 723 pchange.addnewmode(filemode)
724 724
725 725 if fctx.isbinary():
726 726 makebinary(pchange, fctx)
727 727 addoldbinary(pchange, fctx, fname)
728 728 else:
729 729 maketext(pchange, ctx, fname)
730 730
731 731 pdiff.addchange(pchange)
732 732
733 733
734 734 def addadded(pdiff, ctx, added, removed):
735 735 """add file adds to the phabdiff, both new files and copies/moves"""
736 736 # Keep track of files that've been recorded as moved/copied, so if there are
737 737 # additional copies we can mark them (moves get removed from removed)
738 738 copiedchanges = {}
739 739 movedchanges = {}
740 740 for fname in added:
741 741 fctx = ctx[fname]
742 742 pchange = phabchange(currentPath=fname)
743 743
744 744 filemode = gitmode[ctx[fname].flags()]
745 745 renamed = fctx.renamed()
746 746
747 747 if renamed:
748 748 originalfname = renamed[0]
749 749 originalmode = gitmode[ctx.p1()[originalfname].flags()]
750 750 pchange.oldPath = originalfname
751 751
752 752 if originalfname in removed:
753 753 origpchange = phabchange(
754 754 currentPath=originalfname,
755 755 oldPath=originalfname,
756 756 type=DiffChangeType.MOVE_AWAY,
757 757 awayPaths=[fname],
758 758 )
759 759 movedchanges[originalfname] = origpchange
760 760 removed.remove(originalfname)
761 761 pchange.type = DiffChangeType.MOVE_HERE
762 762 elif originalfname in movedchanges:
763 763 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
764 764 movedchanges[originalfname].awayPaths.append(fname)
765 765 pchange.type = DiffChangeType.COPY_HERE
766 766 else: # pure copy
767 767 if originalfname not in copiedchanges:
768 768 origpchange = phabchange(
769 769 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
770 770 )
771 771 copiedchanges[originalfname] = origpchange
772 772 else:
773 773 origpchange = copiedchanges[originalfname]
774 774 origpchange.awayPaths.append(fname)
775 775 pchange.type = DiffChangeType.COPY_HERE
776 776
777 777 if filemode != originalmode:
778 778 pchange.addoldmode(originalmode)
779 779 pchange.addnewmode(filemode)
780 780 else: # Brand-new file
781 781 pchange.addnewmode(gitmode[fctx.flags()])
782 782 pchange.type = DiffChangeType.ADD
783 783
784 784 if fctx.isbinary():
785 785 makebinary(pchange, fctx)
786 786 if renamed:
787 787 addoldbinary(pchange, fctx, originalfname)
788 788 else:
789 789 maketext(pchange, ctx, fname)
790 790
791 791 pdiff.addchange(pchange)
792 792
793 793 for _path, copiedchange in copiedchanges.items():
794 794 pdiff.addchange(copiedchange)
795 795 for _path, movedchange in movedchanges.items():
796 796 pdiff.addchange(movedchange)
797 797
798 798
799 799 def creatediff(ctx):
800 800 """create a Differential Diff"""
801 801 repo = ctx.repo()
802 802 repophid = getrepophid(repo)
803 # Create a "Differential Diff" via "differential.createrawdiff" API
804 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
803 # Create a "Differential Diff" via "differential.creatediff" API
804 pdiff = phabdiff(
805 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
806 branch=b'%s' % ctx.branch(),
807 )
808 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
809 # addadded will remove moved files from removed, so addremoved won't get
810 # them
811 addadded(pdiff, ctx, added, removed)
812 addmodified(pdiff, ctx, modified)
813 addremoved(pdiff, ctx, removed)
805 814 if repophid:
806 params[b'repositoryPHID'] = repophid
807 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
815 pdiff.repositoryPHID = repophid
816 diff = callconduit(
817 repo.ui,
818 b'differential.creatediff',
819 pycompat.byteskwargs(attr.asdict(pdiff)),
820 )
808 821 if not diff:
809 822 raise error.Abort(_(b'cannot create diff for %s') % ctx)
810 823 return diff
811 824
812 825
813 826 def writediffproperties(ctx, diff):
814 827 """write metadata to diff so patches could be applied losslessly"""
828 # creatediff returns a diffid, but querydiffs returns an id
829 diffid = diff.get(b'diffid', diff.get(b'id'))
815 830 params = {
816 b'diff_id': diff[b'id'],
831 b'diff_id': diffid,
817 832 b'name': b'hg:meta',
818 833 b'data': templatefilters.json(
819 834 {
820 835 b'user': ctx.user(),
821 836 b'date': b'%d %d' % ctx.date(),
822 837 b'branch': ctx.branch(),
823 838 b'node': ctx.hex(),
824 839 b'parent': ctx.p1().hex(),
825 840 }
826 841 ),
827 842 }
828 843 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
829 844
830 845 params = {
831 b'diff_id': diff[b'id'],
846 b'diff_id': diffid,
832 847 b'name': b'local:commits',
833 848 b'data': templatefilters.json(
834 849 {
835 850 ctx.hex(): {
836 851 b'author': stringutil.person(ctx.user()),
837 852 b'authorEmail': stringutil.email(ctx.user()),
838 853 b'time': int(ctx.date()[0]),
839 854 b'commit': ctx.hex(),
840 855 b'parents': [ctx.p1().hex()],
841 856 b'branch': ctx.branch(),
842 857 },
843 858 }
844 859 ),
845 860 }
846 861 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
847 862
848 863
849 864 def createdifferentialrevision(
850 865 ctx,
851 866 revid=None,
852 867 parentrevphid=None,
853 868 oldnode=None,
854 869 olddiff=None,
855 870 actions=None,
856 871 comment=None,
857 872 ):
858 873 """create or update a Differential Revision
859 874
860 875 If revid is None, create a new Differential Revision, otherwise update
861 876 revid. If parentrevphid is not None, set it as a dependency.
862 877
863 878 If oldnode is not None, check if the patch content (without commit message
864 879 and metadata) has changed before creating another diff.
865 880
866 881 If actions is not None, they will be appended to the transaction.
867 882 """
868 883 repo = ctx.repo()
869 884 if oldnode:
870 885 diffopts = mdiff.diffopts(git=True, context=32767)
871 886 oldctx = repo.unfiltered()[oldnode]
872 887 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
873 888 else:
874 889 neednewdiff = True
875 890
876 891 transactions = []
877 892 if neednewdiff:
878 893 diff = creatediff(ctx)
879 894 transactions.append({b'type': b'update', b'value': diff[b'phid']})
880 895 if comment:
881 896 transactions.append({b'type': b'comment', b'value': comment})
882 897 else:
883 898 # Even if we don't need to upload a new diff because the patch content
884 899 # does not change, we might still need to update its metadata so
885 900 # pushers could know the correct node metadata.
886 901 assert olddiff
887 902 diff = olddiff
888 903 writediffproperties(ctx, diff)
889 904
890 905 # Set the parent Revision every time, so commit re-ordering is picked-up
891 906 if parentrevphid:
892 907 transactions.append(
893 908 {b'type': b'parents.set', b'value': [parentrevphid]}
894 909 )
895 910
896 911 if actions:
897 912 transactions += actions
898 913
899 914 # Parse commit message and update related fields.
900 915 desc = ctx.description()
901 916 info = callconduit(
902 917 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
903 918 )
904 919 for k, v in info[b'fields'].items():
905 920 if k in [b'title', b'summary', b'testPlan']:
906 921 transactions.append({b'type': k, b'value': v})
907 922
908 923 params = {b'transactions': transactions}
909 924 if revid is not None:
910 925 # Update an existing Differential Revision
911 926 params[b'objectIdentifier'] = revid
912 927
913 928 revision = callconduit(repo.ui, b'differential.revision.edit', params)
914 929 if not revision:
915 930 raise error.Abort(_(b'cannot create revision for %s') % ctx)
916 931
917 932 return revision, diff
918 933
919 934
920 935 def userphids(repo, names):
921 936 """convert user names to PHIDs"""
922 937 names = [name.lower() for name in names]
923 938 query = {b'constraints': {b'usernames': names}}
924 939 result = callconduit(repo.ui, b'user.search', query)
925 940 # A username not being found is not an error of the API, so check if we have missed
926 941 # some names here.
927 942 data = result[b'data']
928 943 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
929 944 unresolved = set(names) - resolved
930 945 if unresolved:
931 946 raise error.Abort(
932 947 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
933 948 )
934 949 return [entry[b'phid'] for entry in data]
935 950
936 951
937 952 @vcrcommand(
938 953 b'phabsend',
939 954 [
940 955 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
941 956 (b'', b'amend', True, _(b'update commit messages')),
942 957 (b'', b'reviewer', [], _(b'specify reviewers')),
943 958 (b'', b'blocker', [], _(b'specify blocking reviewers')),
944 959 (
945 960 b'm',
946 961 b'comment',
947 962 b'',
948 963 _(b'add a comment to Revisions with new/updated Diffs'),
949 964 ),
950 965 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
951 966 ],
952 967 _(b'REV [OPTIONS]'),
953 968 helpcategory=command.CATEGORY_IMPORT_EXPORT,
954 969 )
955 970 def phabsend(ui, repo, *revs, **opts):
956 971 """upload changesets to Phabricator
957 972
958 973 If there are multiple revisions specified, they will be sent as a stack
959 974 with a linear dependency relationship using the order specified by the
960 975 revset.
961 976
962 977 When uploading changesets for the first time, local tags will be created to
963 978 maintain the association. After the first time, phabsend will check
964 979 obsstore and tags information so it can figure out whether to update an
965 980 existing Differential Revision, or create a new one.
966 981
967 982 If --amend is set, update commit messages so they have the
968 983 ``Differential Revision`` URL, and remove the related tags. This is similar to
969 984 what arcanist will do, and is preferred in author-push workflows. Otherwise,
970 985 use local tags to record the ``Differential Revision`` association.
971 986
972 987 The --confirm option lets you confirm changesets before sending them. You
973 988 can also add the following to your configuration file to make it the default
974 989 behaviour::
975 990
976 991 [phabsend]
977 992 confirm = true
978 993
979 994 phabsend will check obsstore and the above association to decide whether to
980 995 update an existing Differential Revision, or create a new one.
981 996 """
982 997 opts = pycompat.byteskwargs(opts)
983 998 revs = list(revs) + opts.get(b'rev', [])
984 999 revs = scmutil.revrange(repo, revs)
985 1000
986 1001 if not revs:
987 1002 raise error.Abort(_(b'phabsend requires at least one changeset'))
988 1003 if opts.get(b'amend'):
989 1004 cmdutil.checkunfinished(repo)
990 1005
991 1006 # {newnode: (oldnode, olddiff, olddrev)}
992 1007 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
993 1008
994 1009 confirm = ui.configbool(b'phabsend', b'confirm')
995 1010 confirm |= bool(opts.get(b'confirm'))
996 1011 if confirm:
997 1012 confirmed = _confirmbeforesend(repo, revs, oldmap)
998 1013 if not confirmed:
999 1014 raise error.Abort(_(b'phabsend cancelled'))
1000 1015
1001 1016 actions = []
1002 1017 reviewers = opts.get(b'reviewer', [])
1003 1018 blockers = opts.get(b'blocker', [])
1004 1019 phids = []
1005 1020 if reviewers:
1006 1021 phids.extend(userphids(repo, reviewers))
1007 1022 if blockers:
1008 1023 phids.extend(
1009 1024 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1010 1025 )
1011 1026 if phids:
1012 1027 actions.append({b'type': b'reviewers.add', b'value': phids})
1013 1028
1014 1029 drevids = [] # [int]
1015 1030 diffmap = {} # {newnode: diff}
1016 1031
1017 1032 # Send patches one by one so we know their Differential Revision PHIDs and
1018 1033 # can provide dependency relationship
1019 1034 lastrevphid = None
1020 1035 for rev in revs:
1021 1036 ui.debug(b'sending rev %d\n' % rev)
1022 1037 ctx = repo[rev]
1023 1038
1024 1039 # Get Differential Revision ID
1025 1040 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1026 1041 if oldnode != ctx.node() or opts.get(b'amend'):
1027 1042 # Create or update Differential Revision
1028 1043 revision, diff = createdifferentialrevision(
1029 1044 ctx,
1030 1045 revid,
1031 1046 lastrevphid,
1032 1047 oldnode,
1033 1048 olddiff,
1034 1049 actions,
1035 1050 opts.get(b'comment'),
1036 1051 )
1037 1052 diffmap[ctx.node()] = diff
1038 1053 newrevid = int(revision[b'object'][b'id'])
1039 1054 newrevphid = revision[b'object'][b'phid']
1040 1055 if revid:
1041 1056 action = b'updated'
1042 1057 else:
1043 1058 action = b'created'
1044 1059
1045 1060 # Create a local tag to note the association, if commit message
1046 1061 # does not have it already
1047 1062 m = _differentialrevisiondescre.search(ctx.description())
1048 1063 if not m or int(m.group(r'id')) != newrevid:
1049 1064 tagname = b'D%d' % newrevid
1050 1065 tags.tag(
1051 1066 repo,
1052 1067 tagname,
1053 1068 ctx.node(),
1054 1069 message=None,
1055 1070 user=None,
1056 1071 date=None,
1057 1072 local=True,
1058 1073 )
1059 1074 else:
1060 1075 # Nothing changed. But still set "newrevphid" so the next revision
1061 1076 # could depend on this one and "newrevid" for the summary line.
1062 1077 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1063 1078 newrevid = revid
1064 1079 action = b'skipped'
1065 1080
1066 1081 actiondesc = ui.label(
1067 1082 {
1068 1083 b'created': _(b'created'),
1069 1084 b'skipped': _(b'skipped'),
1070 1085 b'updated': _(b'updated'),
1071 1086 }[action],
1072 1087 b'phabricator.action.%s' % action,
1073 1088 )
1074 1089 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1075 1090 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1076 1091 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1077 1092 ui.write(
1078 1093 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1079 1094 )
1080 1095 drevids.append(newrevid)
1081 1096 lastrevphid = newrevphid
1082 1097
1083 1098 # Update commit messages and remove tags
1084 1099 if opts.get(b'amend'):
1085 1100 unfi = repo.unfiltered()
1086 1101 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1087 1102 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1088 1103 wnode = unfi[b'.'].node()
1089 1104 mapping = {} # {oldnode: [newnode]}
1090 1105 for i, rev in enumerate(revs):
1091 1106 old = unfi[rev]
1092 1107 drevid = drevids[i]
1093 1108 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1094 1109 newdesc = getdescfromdrev(drev)
1095 1110 # Make sure the commit message contains "Differential Revision"
1096 1111 if old.description() != newdesc:
1097 1112 if old.phase() == phases.public:
1098 1113 ui.warn(
1099 1114 _(b"warning: not updating public commit %s\n")
1100 1115 % scmutil.formatchangeid(old)
1101 1116 )
1102 1117 continue
1103 1118 parents = [
1104 1119 mapping.get(old.p1().node(), (old.p1(),))[0],
1105 1120 mapping.get(old.p2().node(), (old.p2(),))[0],
1106 1121 ]
1107 1122 new = context.metadataonlyctx(
1108 1123 repo,
1109 1124 old,
1110 1125 parents=parents,
1111 1126 text=newdesc,
1112 1127 user=old.user(),
1113 1128 date=old.date(),
1114 1129 extra=old.extra(),
1115 1130 )
1116 1131
1117 1132 newnode = new.commit()
1118 1133
1119 1134 mapping[old.node()] = [newnode]
1120 1135 # Update diff property
1121 1136 # If it fails just warn and keep going, otherwise the DREV
1122 1137 # associations will be lost
1123 1138 try:
1124 1139 writediffproperties(unfi[newnode], diffmap[old.node()])
1125 1140 except util.urlerr.urlerror:
1126 1141 ui.warnnoi18n(
1127 1142 b'Failed to update metadata for D%s\n' % drevid
1128 1143 )
1129 1144 # Remove the local tag since it's no longer necessary
1130 1145 tagname = b'D%d' % drevid
1131 1146 if tagname in repo.tags():
1132 1147 tags.tag(
1133 1148 repo,
1134 1149 tagname,
1135 1150 nullid,
1136 1151 message=None,
1137 1152 user=None,
1138 1153 date=None,
1139 1154 local=True,
1140 1155 )
1141 1156 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1142 1157 if wnode in mapping:
1143 1158 unfi.setparents(mapping[wnode][0])
1144 1159
1145 1160
1146 1161 # Map from "hg:meta" keys to header understood by "hg import". The order is
1147 1162 # consistent with "hg export" output.
1148 1163 _metanamemap = util.sortdict(
1149 1164 [
1150 1165 (b'user', b'User'),
1151 1166 (b'date', b'Date'),
1152 1167 (b'branch', b'Branch'),
1153 1168 (b'node', b'Node ID'),
1154 1169 (b'parent', b'Parent '),
1155 1170 ]
1156 1171 )
1157 1172
1158 1173
1159 1174 def _confirmbeforesend(repo, revs, oldmap):
1160 1175 url, token = readurltoken(repo.ui)
1161 1176 ui = repo.ui
1162 1177 for rev in revs:
1163 1178 ctx = repo[rev]
1164 1179 desc = ctx.description().splitlines()[0]
1165 1180 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1166 1181 if drevid:
1167 1182 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
1168 1183 else:
1169 1184 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1170 1185
1171 1186 ui.write(
1172 1187 _(b'%s - %s: %s\n')
1173 1188 % (
1174 1189 drevdesc,
1175 1190 ui.label(bytes(ctx), b'phabricator.node'),
1176 1191 ui.label(desc, b'phabricator.desc'),
1177 1192 )
1178 1193 )
1179 1194
1180 1195 if ui.promptchoice(
1181 1196 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1182 1197 ):
1183 1198 return False
1184 1199
1185 1200 return True
1186 1201
1187 1202
1188 1203 _knownstatusnames = {
1189 1204 b'accepted',
1190 1205 b'needsreview',
1191 1206 b'needsrevision',
1192 1207 b'closed',
1193 1208 b'abandoned',
1194 1209 }
1195 1210
1196 1211
1197 1212 def _getstatusname(drev):
1198 1213 """get normalized status name from a Differential Revision"""
1199 1214 return drev[b'statusName'].replace(b' ', b'').lower()
1200 1215
1201 1216
1202 1217 # Small language to specify differential revisions. Support symbols: (), :X,
1203 1218 # +, and -.
1204 1219
1205 1220 _elements = {
1206 1221 # token-type: binding-strength, primary, prefix, infix, suffix
1207 1222 b'(': (12, None, (b'group', 1, b')'), None, None),
1208 1223 b':': (8, None, (b'ancestors', 8), None, None),
1209 1224 b'&': (5, None, None, (b'and_', 5), None),
1210 1225 b'+': (4, None, None, (b'add', 4), None),
1211 1226 b'-': (4, None, None, (b'sub', 4), None),
1212 1227 b')': (0, None, None, None, None),
1213 1228 b'symbol': (0, b'symbol', None, None, None),
1214 1229 b'end': (0, None, None, None, None),
1215 1230 }
1216 1231
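# Illustrative specs (not part of this change) accepted by this mini-language,
# matching the examples in phabread's docstring:
#
#   b'D123'            a single revision
#   b':D6'             the stack up to D6 (following dependencies)
#   b':D6+8-(2+D4)'    the stack up to D6, plus D8, minus D2 and D4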
1217 1232
1218 1233 def _tokenize(text):
1219 1234 view = memoryview(text) # zero-copy slice
1220 1235 special = b'():+-& '
1221 1236 pos = 0
1222 1237 length = len(text)
1223 1238 while pos < length:
1224 1239 symbol = b''.join(
1225 1240 itertools.takewhile(
1226 1241 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1227 1242 )
1228 1243 )
1229 1244 if symbol:
1230 1245 yield (b'symbol', symbol, pos)
1231 1246 pos += len(symbol)
1232 1247 else: # special char, ignore space
1233 1248 if text[pos] != b' ':
1234 1249 yield (text[pos], None, pos)
1235 1250 pos += 1
1236 1251 yield (b'end', None, pos)
1237 1252
1238 1253
1239 1254 def _parse(text):
1240 1255 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1241 1256 if pos != len(text):
1242 1257 raise error.ParseError(b'invalid token', pos)
1243 1258 return tree
1244 1259
1245 1260
1246 1261 def _parsedrev(symbol):
1247 1262 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1248 1263 if symbol.startswith(b'D') and symbol[1:].isdigit():
1249 1264 return int(symbol[1:])
1250 1265 if symbol.isdigit():
1251 1266 return int(symbol)
1252 1267
1253 1268
1254 1269 def _prefetchdrevs(tree):
1255 1270 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1256 1271 drevs = set()
1257 1272 ancestordrevs = set()
1258 1273 op = tree[0]
1259 1274 if op == b'symbol':
1260 1275 r = _parsedrev(tree[1])
1261 1276 if r:
1262 1277 drevs.add(r)
1263 1278 elif op == b'ancestors':
1264 1279 r, a = _prefetchdrevs(tree[1])
1265 1280 drevs.update(r)
1266 1281 ancestordrevs.update(r)
1267 1282 ancestordrevs.update(a)
1268 1283 else:
1269 1284 for t in tree[1:]:
1270 1285 r, a = _prefetchdrevs(t)
1271 1286 drevs.update(r)
1272 1287 ancestordrevs.update(a)
1273 1288 return drevs, ancestordrevs
1274 1289
1275 1290
1276 1291 def querydrev(repo, spec):
1277 1292 """return a list of "Differential Revision" dicts
1278 1293
1279 1294 spec is a string using a simple query language, see docstring in phabread
1280 1295 for details.
1281 1296
1282 1297 A "Differential Revision dict" looks like:
1283 1298
1284 1299 {
1285 1300 "id": "2",
1286 1301 "phid": "PHID-DREV-672qvysjcczopag46qty",
1287 1302 "title": "example",
1288 1303 "uri": "https://phab.example.com/D2",
1289 1304 "dateCreated": "1499181406",
1290 1305 "dateModified": "1499182103",
1291 1306 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1292 1307 "status": "0",
1293 1308 "statusName": "Needs Review",
1294 1309 "properties": [],
1295 1310 "branch": null,
1296 1311 "summary": "",
1297 1312 "testPlan": "",
1298 1313 "lineCount": "2",
1299 1314 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1300 1315 "diffs": [
1301 1316 "3",
1302 1317 "4",
1303 1318 ],
1304 1319 "commits": [],
1305 1320 "reviewers": [],
1306 1321 "ccs": [],
1307 1322 "hashes": [],
1308 1323 "auxiliary": {
1309 1324 "phabricator:projects": [],
1310 1325 "phabricator:depends-on": [
1311 1326 "PHID-DREV-gbapp366kutjebt7agcd"
1312 1327 ]
1313 1328 },
1314 1329 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1315 1330 "sourcePath": null
1316 1331 }
1317 1332 """
1318 1333
1319 1334 def fetch(params):
1320 1335 """params -> single drev or None"""
1321 1336 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1322 1337 if key in prefetched:
1323 1338 return prefetched[key]
1324 1339 drevs = callconduit(repo.ui, b'differential.query', params)
1325 1340 # Fill prefetched with the result
1326 1341 for drev in drevs:
1327 1342 prefetched[drev[b'phid']] = drev
1328 1343 prefetched[int(drev[b'id'])] = drev
1329 1344 if key not in prefetched:
1330 1345 raise error.Abort(
1331 1346 _(b'cannot get Differential Revision %r') % params
1332 1347 )
1333 1348 return prefetched[key]
1334 1349
1335 1350 def getstack(topdrevids):
1336 1351 """given a top, get a stack from the bottom, [id] -> [id]"""
1337 1352 visited = set()
1338 1353 result = []
1339 1354 queue = [{b'ids': [i]} for i in topdrevids]
1340 1355 while queue:
1341 1356 params = queue.pop()
1342 1357 drev = fetch(params)
1343 1358 if drev[b'id'] in visited:
1344 1359 continue
1345 1360 visited.add(drev[b'id'])
1346 1361 result.append(int(drev[b'id']))
1347 1362 auxiliary = drev.get(b'auxiliary', {})
1348 1363 depends = auxiliary.get(b'phabricator:depends-on', [])
1349 1364 for phid in depends:
1350 1365 queue.append({b'phids': [phid]})
1351 1366 result.reverse()
1352 1367 return smartset.baseset(result)
1353 1368
1354 1369 # Initialize prefetch cache
1355 1370 prefetched = {} # {id or phid: drev}
1356 1371
1357 1372 tree = _parse(spec)
1358 1373 drevs, ancestordrevs = _prefetchdrevs(tree)
1359 1374
1360 1375 # developer config: phabricator.batchsize
1361 1376 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1362 1377
1363 1378 # Prefetch Differential Revisions in batch
1364 1379 tofetch = set(drevs)
1365 1380 for r in ancestordrevs:
1366 1381 tofetch.update(range(max(1, r - batchsize), r + 1))
1367 1382 if drevs:
1368 1383 fetch({b'ids': list(tofetch)})
1369 1384 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1370 1385
1371 1386 # Walk through the tree, return smartsets
1372 1387 def walk(tree):
1373 1388 op = tree[0]
1374 1389 if op == b'symbol':
1375 1390 drev = _parsedrev(tree[1])
1376 1391 if drev:
1377 1392 return smartset.baseset([drev])
1378 1393 elif tree[1] in _knownstatusnames:
1379 1394 drevs = [
1380 1395 r
1381 1396 for r in validids
1382 1397 if _getstatusname(prefetched[r]) == tree[1]
1383 1398 ]
1384 1399 return smartset.baseset(drevs)
1385 1400 else:
1386 1401 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1387 1402 elif op in {b'and_', b'add', b'sub'}:
1388 1403 assert len(tree) == 3
1389 1404 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1390 1405 elif op == b'group':
1391 1406 return walk(tree[1])
1392 1407 elif op == b'ancestors':
1393 1408 return getstack(walk(tree[1]))
1394 1409 else:
1395 1410 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1396 1411
1397 1412 return [prefetched[r] for r in walk(tree)]
1398 1413
1399 1414
1400 1415 def getdescfromdrev(drev):
1401 1416 """get description (commit message) from "Differential Revision"
1402 1417
1403 1418 This is similar to the differential.getcommitmessage API, but we only care
1404 1419 about limited fields: title, summary, test plan, and URL.
1405 1420 """
1406 1421 title = drev[b'title']
1407 1422 summary = drev[b'summary'].rstrip()
1408 1423 testplan = drev[b'testPlan'].rstrip()
1409 1424 if testplan:
1410 1425 testplan = b'Test Plan:\n%s' % testplan
1411 1426 uri = b'Differential Revision: %s' % drev[b'uri']
1412 1427 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1413 1428
1414 1429
1415 1430 def getdiffmeta(diff):
1416 1431 """get commit metadata (date, node, user, p1) from a diff object
1417 1432
1418 1433 The metadata could be "hg:meta", sent by phabsend, like:
1419 1434
1420 1435 "properties": {
1421 1436 "hg:meta": {
1422 1437 "date": "1499571514 25200",
1423 1438 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1424 1439 "user": "Foo Bar <foo@example.com>",
1425 1440 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1426 1441 }
1427 1442 }
1428 1443
1429 1444 Or converted from "local:commits", sent by "arc", like:
1430 1445
1431 1446 "properties": {
1432 1447 "local:commits": {
1433 1448 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1434 1449 "author": "Foo Bar",
1435 1450 "time": 1499546314,
1436 1451 "branch": "default",
1437 1452 "tag": "",
1438 1453 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1439 1454 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1440 1455 "local": "1000",
1441 1456 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1442 1457 "summary": "...",
1443 1458 "message": "...",
1444 1459 "authorEmail": "foo@example.com"
1445 1460 }
1446 1461 }
1447 1462 }
1448 1463
1449 1464 Note: metadata extracted from "local:commits" will lose time zone
1450 1465 information.
1451 1466 """
1452 1467 props = diff.get(b'properties') or {}
1453 1468 meta = props.get(b'hg:meta')
1454 1469 if not meta:
1455 1470 if props.get(b'local:commits'):
1456 1471 commit = sorted(props[b'local:commits'].values())[0]
1457 1472 meta = {}
1458 1473 if b'author' in commit and b'authorEmail' in commit:
1459 1474 meta[b'user'] = b'%s <%s>' % (
1460 1475 commit[b'author'],
1461 1476 commit[b'authorEmail'],
1462 1477 )
1463 1478 if b'time' in commit:
1464 1479 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1465 1480 if b'branch' in commit:
1466 1481 meta[b'branch'] = commit[b'branch']
1467 1482 node = commit.get(b'commit', commit.get(b'rev'))
1468 1483 if node:
1469 1484 meta[b'node'] = node
1470 1485 if len(commit.get(b'parents', ())) >= 1:
1471 1486 meta[b'parent'] = commit[b'parents'][0]
1472 1487 else:
1473 1488 meta = {}
1474 1489 if b'date' not in meta and b'dateCreated' in diff:
1475 1490 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1476 1491 if b'branch' not in meta and diff.get(b'branch'):
1477 1492 meta[b'branch'] = diff[b'branch']
1478 1493 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1479 1494 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1480 1495 return meta
1481 1496
1482 1497
1483 1498 def readpatch(repo, drevs, write):
1484 1499 """generate plain-text patch readable by 'hg import'
1485 1500
1486 1501 write is usually ui.write. drevs is what "querydrev" returns, results of
1487 1502 "differential.query".
1488 1503 """
1489 1504 # Prefetch hg:meta property for all diffs
1490 1505 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1491 1506 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1492 1507
1493 1508 # Generate patch for each drev
1494 1509 for drev in drevs:
1495 1510 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1496 1511
1497 1512 diffid = max(int(v) for v in drev[b'diffs'])
1498 1513 body = callconduit(
1499 1514 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1500 1515 )
1501 1516 desc = getdescfromdrev(drev)
1502 1517 header = b'# HG changeset patch\n'
1503 1518
1504 1519 # Try to preserve metadata from hg:meta property. Write hg patch
1505 1520 # headers that can be read by the "import" command. See patchheadermap
1506 1521 # and extract in mercurial/patch.py for supported headers.
1507 1522 meta = getdiffmeta(diffs[b'%d' % diffid])
1508 1523 for k in _metanamemap.keys():
1509 1524 if k in meta:
1510 1525 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1511 1526
1512 1527 content = b'%s%s\n%s' % (header, desc, body)
1513 1528 write(content)
1514 1529
1515 1530
1516 1531 @vcrcommand(
1517 1532 b'phabread',
1518 1533 [(b'', b'stack', False, _(b'read dependencies'))],
1519 1534 _(b'DREVSPEC [OPTIONS]'),
1520 1535 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1521 1536 )
1522 1537 def phabread(ui, repo, spec, **opts):
1523 1538 """print patches from Phabricator suitable for importing
1524 1539
1525 1540 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1526 1541 the number ``123``. It could also have common operators like ``+``, ``-``,
1527 1542 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1528 1543 select a stack.
1529 1544
1530 1545 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1531 1546 could be used to filter patches by status. For performance reasons, they
1532 1547 only represent a subset of non-status selections and cannot be used alone.
1533 1548
1534 1549 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1535 1550 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1536 1551 stack up to D9.
1537 1552
1538 1553 If --stack is given, follow dependency information and read all patches.
1539 1554 It is equivalent to the ``:`` operator.
1540 1555 """
1541 1556 opts = pycompat.byteskwargs(opts)
1542 1557 if opts.get(b'stack'):
1543 1558 spec = b':(%s)' % spec
1544 1559 drevs = querydrev(repo, spec)
1545 1560 readpatch(repo, drevs, ui.write)
1546 1561
1547 1562
1548 1563 @vcrcommand(
1549 1564 b'phabupdate',
1550 1565 [
1551 1566 (b'', b'accept', False, _(b'accept revisions')),
1552 1567 (b'', b'reject', False, _(b'reject revisions')),
1553 1568 (b'', b'abandon', False, _(b'abandon revisions')),
1554 1569 (b'', b'reclaim', False, _(b'reclaim revisions')),
1555 1570 (b'm', b'comment', b'', _(b'comment on the last revision')),
1556 1571 ],
1557 1572 _(b'DREVSPEC [OPTIONS]'),
1558 1573 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1559 1574 )
1560 1575 def phabupdate(ui, repo, spec, **opts):
1561 1576 """update Differential Revision in batch
1562 1577
1563 1578 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1564 1579 """
1565 1580 opts = pycompat.byteskwargs(opts)
1566 1581 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1567 1582 if len(flags) > 1:
1568 1583 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1569 1584
1570 1585 actions = []
1571 1586 for f in flags:
1572 1587 actions.append({b'type': f, b'value': b'true'})
1573 1588
1574 1589 drevs = querydrev(repo, spec)
1575 1590 for i, drev in enumerate(drevs):
1576 1591 if i + 1 == len(drevs) and opts.get(b'comment'):
1577 1592 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1578 1593 if actions:
1579 1594 params = {
1580 1595 b'objectIdentifier': drev[b'phid'],
1581 1596 b'transactions': actions,
1582 1597 }
1583 1598 callconduit(ui, b'differential.revision.edit', params)
1584 1599
1585 1600
1586 1601 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1587 1602 def template_review(context, mapping):
1588 1603 """:phabreview: Object describing the review for this changeset.
1589 1604 Has attributes `url` and `id`.
1590 1605 """
1591 1606 ctx = context.resource(mapping, b'ctx')
1592 1607 m = _differentialrevisiondescre.search(ctx.description())
1593 1608 if m:
1594 1609 return templateutil.hybriddict(
1595 1610 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1596 1611 )
1597 1612 else:
1598 1613 tags = ctx.repo().nodetags(ctx.node())
1599 1614 for t in tags:
1600 1615 if _differentialrevisiontagre.match(t):
1601 1616 url = ctx.repo().ui.config(b'phabricator', b'url')
1602 1617 if not url.endswith(b'/'):
1603 1618 url += b'/'
1604 1619 url += t
1605 1620
1606 1621 return templateutil.hybriddict({b'url': url, b'id': t,})
1607 1622 return None