##// END OF EJS Templates
phabricator: update hgmatcher to cope with the new data format...
Ian Moody -
r43558:a4da1c3b default
parent child Browse files
Show More
@@ -1,1639 +1,1651 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 46 import hashlib
47 47 import itertools
48 48 import json
49 49 import mimetypes
50 50 import operator
51 51 import re
52 52
53 53 from mercurial.node import bin, nullid
54 54 from mercurial.i18n import _
55 55 from mercurial.pycompat import getattr
56 56 from mercurial.thirdparty import attr
57 57 from mercurial import (
58 58 cmdutil,
59 59 context,
60 60 encoding,
61 61 error,
62 62 exthelper,
63 63 httpconnection as httpconnectionmod,
64 64 match,
65 65 mdiff,
66 66 obsutil,
67 67 parser,
68 68 patch,
69 69 phases,
70 70 pycompat,
71 71 scmutil,
72 72 smartset,
73 73 tags,
74 74 templatefilters,
75 75 templateutil,
76 76 url as urlmod,
77 77 util,
78 78 )
79 79 from mercurial.utils import (
80 80 procutil,
81 81 stringutil,
82 82 )
83 83
84 84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 86 # be specifying the version(s) of Mercurial they are tested with, or
87 87 # leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All command/config/template registration goes through one exthelper
# instance, exported below under the names cmdutil expects.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# repository callsign on the Phabricator server (see module docstring)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# optional external curl command used instead of the builtin HTTP library
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# base URL of the Phabricator instance
eh.configitem(
    b'phabricator', b'url', default=None,
)
# NOTE(review): presumably prompts before sending; confirm against phabsend
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# labels used for colored output
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# hidden flag appended to every @vcrcommand so tests can record/replay the
# conduit HTTP traffic
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139 139
140 140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """command registration helper with VCR record/replay support

    Works like ``@command`` but appends the hidden ``--test-vcr`` flag.  When
    that flag is given, all conduit HTTP traffic is recorded to (or replayed
    from) the named cassette file via the ``vcr`` package, with API tokens
    scrubbed from the transcript before it is written.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match requests by URI/method and by decoded body parameters, so
        # that dict/JSON key-ordering differences don't break replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        # Require the same parameter names on both sides; the original only
        # checked r1's keys, which made the matcher asymmetric.
        if set(r1params) != set(r2params):
            return False
        for key in r1params:
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = json.loads(value)
                r2json = json.loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # never store real conduit API tokens in the recorded cassette
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # session cookies are environment-specific and must not be replayed
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's import-time magic is incompatible with Mercurial's
                # demandimport, so disable it while loading the package
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
205 217
206 218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # Python booleans become PHP-style form values
        if isinstance(obj, bool):
            obj = b'true' if obj else b'false'
        # dispatch on the exact type: only plain lists/dicts recurse
        objtype = type(obj)
        if objtype is list:
            children = [(b'%d' % idx, elem) for idx, elem in enumerate(obj)]
        elif objtype is dict:
            children = list(obj.items())
        else:
            children = None
        if children is None:
            flatparams[prefix] = obj
        else:
            for key, value in children:
                subprefix = b'%s[%s]' % (prefix, key) if prefix else key
                process(subprefix, value)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
232 244
233 245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # look up the matching [auth] group for this URL, if any
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        groupname, authconfig = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authconfig.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
262 274
263 275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # copy before mutating so the caller's dict is untouched
    params = params.copy()
    # the API token rides along inside the JSON payload
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # user-configured external curl; feed the form data on stdin
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # convert unicode strings in the decoded JSON back to local bytes
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
307 319
308 320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # stable key order so output is deterministic and diffable
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
332 344
333 345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # resolve the callsign to a repository PHID via conduit
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(response[b'data']) == 0:
        return None
    phid = response[b'data'][0][b'phid']
    # cache the answer in config so later calls skip the round-trip
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
353 365
354 366
# matches local tags of the form "D123" that track Differential Revisions
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# matches the "Differential Revision: <url>D<id>" line in commit messages
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
359 371
360 372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: tag matches must be confirmed below
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: an explicit commit-message association wins
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # extract the hg node recorded in a diff's metadata, if any
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # retagging to nullid drops the stale local tag
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
451 463
452 464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    labelledchunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # concatenate the chunk bodies, discarding the UI labels
    return b''.join(chunk for chunk, _label in labelledchunks)
461 473
462 474
class DiffChangeType(object):
    # numeric change kinds sent in the conduit payload; names presumably
    # mirror Phabricator's DifferentialChangeType constants — TODO confirm
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
472 484
473 485
class DiffFileType(object):
    # numeric file kinds sent in the conduit payload (see makebinary/maketext)
    TEXT = 1
    IMAGE = 2
    BINARY = 3
478 490
479 491
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # hunk body text; maketext strips the leading hunk-header line
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
493 505
494 506
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate each 'new:' metadata entry under the matching 'old:' key"""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode for this file"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode for this file"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk (as a dict) and fold its line stats in"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
534 546
535 547
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange (as a dict), keyed by its current path"""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
562 574
563 575
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # very large context so each hunk effectively carries the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # drop the first (hunk header) line; offsets are passed explicitly
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # compute add/del line counts for this hunk via diffstat
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
592 604
593 605
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # ask the server which byte ranges it still needs
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            # server already has this range; skip it
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
620 632
621 633
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the server-side file PHID; aborts if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID allocated: send the whole file in one request
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated up front means the server wants chunked upload
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
657 669
658 670
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        # upload the previous content and link it via its PHID
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
680 692
681 693
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    # guess the mime type from the file name so the web UI can render it
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
694 706
695 707
# Copied from mercurial/patch.py
# maps hg file flag -> git file mode (symlink, executable, regular)
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
698 710
699 711
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # both the new content and (if present) the parent's content must
        # decode cleanly for the file to stay "text"
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
715 727
716 728
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        fctx = ctx.p1()[fname]
        if not (fctx.isbinary() or notutf8(fctx)):
            # text deletion: include the removed content as a hunk
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
729 741
730 742
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            # record a mode change (e.g. +x) alongside the content change
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
749 761
750 762
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Note: mutates ``removed`` in place (moves are taken out of it so
    addremoved() won't also report them).
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source is gone: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a move with additional copies becomes a multicopy
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # the *_AWAY counterpart changes are registered after all adds are seen
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
814 826
815 827
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
841 853
842 854
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    # hg:meta carries the commit identity needed to reconstruct the patch
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    # local:commits mirrors the same data in the per-commit form
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
879 891
880 892
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    If comment is not None, it is attached when a new diff is uploaded.

    Returns (revision, diff) as returned by the conduit calls.
    """
    repo = ctx.repo()
    if oldnode:
        # compare full-context diffs to decide whether anything changed
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
950 962
951 963
def userphids(repo, names):
    """Resolve user names to Phabricator PHIDs via ``user.search``.

    Aborts when any name is unknown: the API silently drops unknown
    usernames rather than reporting an error, so detect that client-side.
    """
    lowered = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    entries = result[b'data']
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
967 979
968 980
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line is ORed with the [phabsend] confirm
    # config knob.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Resolve reviewer/blocker names to PHIDs once; the resulting
    # "reviewers.add" transaction is attached to every revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1176 1188
1177 1189
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space is deliberate: readpatch emits "# %s %s", producing
        # the two-space "# Parent  <node>" form that "hg export" uses.
        (b'parent', b'Parent '),
    ]
)
1189 1201
1190 1202
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per revision and prompt before sending.

    Returns True when the user confirms, False when they decline.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        summary = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known revisions show their D-number; new ones show "NEW".
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        summarydesc = ui.label(summary, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, summarydesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns the 0-based choice index; 0 is "Yes".
    return ui.promptchoice(prompt) == 0
1218 1230
1219 1231
# Status names (in the normalized form produced by _getstatusname) that the
# DREVSPEC query language accepts as filter symbols, e.g. ":D9 & needsreview".
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1227 1239
1228 1240
1229 1241 def _getstatusname(drev):
1230 1242 """get normalized status name from a Differential Revision"""
1231 1243 return drev[b'statusName'].replace(b' ', b'').lower()
1232 1244
1233 1245
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# Grammar table consumed by parser.parser(); each entry is
# (binding-strength, primary, prefix, infix, suffix) as in mercurial's
# generic Pratt parser.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1248 1260
1249 1261
def _tokenize(text):
    """Lexer for the DREVSPEC language.

    Yields (token-type, value, position) tuples where token-type is a key of
    _elements; a trailing (b'end', None, pos) token is always emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice rather than index: on Python 3, text[pos] is an int, so
            # "text[pos] != b' '" would always be true (spaces would be
            # yielded) and the yielded token would be an int that matches no
            # _elements key. text[pos : pos + 1] is a 1-byte bytes object on
            # both Python 2 and 3.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1269 1281
1270 1282
def _parse(text):
    """Parse a DREVSPEC bytes string into a parse tree.

    Raises error.ParseError when input remains after the parser stops
    (i.e. the spec contains a syntactically invalid token).
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1276 1288
1277 1289
1278 1290 def _parsedrev(symbol):
1279 1291 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1280 1292 if symbol.startswith(b'D') and symbol[1:].isdigit():
1281 1293 return int(symbol[1:])
1282 1294 if symbol.isdigit():
1283 1295 return int(symbol)
1284 1296
1285 1297
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Walks the parse tree recursively; ids in the second set are operands of
    ":" (ancestors) expressions, whose whole stacks need fetching.
    """
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif kind == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles |= subsingles
        # The operand of ":" is itself a stack top to walk from.
        ancestors |= subsingles
        ancestors |= subancestors
    else:
        for subtree in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(subtree)
            singles |= subsingles
            ancestors |= subancestors
    return singles, ancestors
1306 1318
1307 1319
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        # Cache key: the first id or phid being asked for.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, indexed by both phid and int id.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges towards the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor root,
    # speculatively fetch the batchsize ids below it as well.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter the already-prefetched ids rather than
                # issuing another server query.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1430 1442
1431 1443
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL. Empty sections
    are omitted; the rest are joined with blank lines.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    return b'\n\n'.join(section for section in sections if section)
1445 1457
1446 1458
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        localcommits = props.get(b'local:commits')
        if localcommits:
            src = sorted(localcommits.values())[0]
            meta = {}
            if b'author' in src and b'authorEmail' in src:
                meta[b'user'] = b'%s <%s>' % (
                    src[b'author'],
                    src[b'authorEmail'],
                )
            if b'time' in src:
                meta[b'date'] = b'%d 0' % int(src[b'time'])
            if b'branch' in src:
                meta[b'branch'] = src[b'branch']
            node = src.get(b'commit', src.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(src.get(b'parents', ())) >= 1:
                meta[b'parent'] = src[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1513 1525
1514 1526
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for the newest diff of every drev in one
    # batched conduit call.
    latestdiffids = sorted(
        {max(int(v) for v in drev[b'diffs']) for drev in drevs}
    )
    diffs = callconduit(
        repo.ui, b'differential.querydiffs', {b'ids': latestdiffids}
    )

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(b'# %s %s' % (_metanamemap[key], meta[key]))
        header = b''.join(line + b'\n' for line in headerlines)

        write(b'%s%s\n%s' % (header, desc, body))
1546 1558
1547 1559
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for wrapping the spec in the ":" (ancestors) operator.
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1578 1590
1579 1591
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive.
    flags = []
    for name in (b'accept', b'reject', b'abandon', b'reclaim'):
        if opts.get(name):
            flags.append(name)
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    lastindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # --comment only applies to the last revision of the selection.
        if i == lastindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1616 1628
1617 1629
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line embedded in the commit
    # message (the --amend workflow).
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {
                b'url': match.group(r'url'),
                b'id': b"D%s" % match.group(r'id'),
            }
        )
    # Otherwise look for a local "D123" tag (the no-amend workflow) and
    # build the URL from the configured Phabricator base URL.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = repo.ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag})
    return None
General Comments 0
You need to be logged in to leave comments. Login now