##// END OF EJS Templates
index: use `index.has_node` in `phabricator.getoldnodedrevmap`...
marmoute -
r43949:4cb3f5bb default
parent child Browse files
Show More
@@ -1,1650 +1,1650 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 46 import hashlib
47 47 import itertools
48 48 import json
49 49 import mimetypes
50 50 import operator
51 51 import re
52 52
53 53 from mercurial.node import bin, nullid
54 54 from mercurial.i18n import _
55 55 from mercurial.pycompat import getattr
56 56 from mercurial.thirdparty import attr
57 57 from mercurial import (
58 58 cmdutil,
59 59 context,
60 60 encoding,
61 61 error,
62 62 exthelper,
63 63 httpconnection as httpconnectionmod,
64 64 match,
65 65 mdiff,
66 66 obsutil,
67 67 parser,
68 68 patch,
69 69 phases,
70 70 pycompat,
71 71 scmutil,
72 72 smartset,
73 73 tags,
74 74 templatefilters,
75 75 templateutil,
76 76 url as urlmod,
77 77 util,
78 78 )
79 79 from mercurial.utils import (
80 80 procutil,
81 81 stringutil,
82 82 )
83 83
84 84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 86 # be specifying the version(s) of Mercurial they are tested with, or
87 87 # leave the attribute unspecified.
88 88 testedwith = b'ships-with-hg-core'
89 89
90 90 eh = exthelper.exthelper()
91 91
92 92 cmdtable = eh.cmdtable
93 93 command = eh.command
94 94 configtable = eh.configtable
95 95 templatekeyword = eh.templatekeyword
96 96
97 97 # developer config: phabricator.batchsize
98 98 eh.configitem(
99 99 b'phabricator', b'batchsize', default=12,
100 100 )
101 101 eh.configitem(
102 102 b'phabricator', b'callsign', default=None,
103 103 )
104 104 eh.configitem(
105 105 b'phabricator', b'curlcmd', default=None,
106 106 )
107 107 # developer config: phabricator.repophid
108 108 eh.configitem(
109 109 b'phabricator', b'repophid', default=None,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'url', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabsend', b'confirm', default=False,
116 116 )
117 117
118 118 colortable = {
119 119 b'phabricator.action.created': b'green',
120 120 b'phabricator.action.skipped': b'magenta',
121 121 b'phabricator.action.updated': b'magenta',
122 122 b'phabricator.desc': b'',
123 123 b'phabricator.drev': b'bold',
124 124 b'phabricator.node': b'',
125 125 }
126 126
127 127 _VCR_FLAGS = [
128 128 (
129 129 b'',
130 130 b'test-vcr',
131 131 b'',
132 132 _(
133 133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 134 b', otherwise will mock all http requests using the specified vcr file.'
135 135 b' (ADVANCED)'
136 136 ),
137 137 ),
138 138 ]
139 139
140 140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``@command`` but appends the ``--test-vcr`` flag.  When
    that flag names a cassette file, conduit HTTP requests are recorded to
    it (or, if the file already exists, replayed from it) via the external
    ``vcr`` package.  Used by the test suite to avoid a live server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method, and parsed body parameters
        # agree.  JSON-valued parameters are compared structurally so that
        # key-ordering differences do not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the real conduit API token before it is written to disk.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies may carry session state; drop them from the recording.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's module patching does not work with demandimport's
                # lazy module proxies, so disable it while loading vcr.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/docs output.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
217 217
218 218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # Python booleans become PHP-style form values.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type check (not isinstance): dict/list subclasses such as
        # phabhunk must be treated as scalars, matching the original logic.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            pairs = value.items()
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
244 244
245 245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    # Reuse the [auth] machinery to find credentials matching the URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274 274
275 275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the API method name (e.g. ``differential.querydiffs``).
    The request body is form-encoded and sent either via the builtin
    urllib-based opener or, when ``phabricator.curlcmd`` is set, by piping
    it through the external curl command.  Raises ``error.Abort`` when the
    server response contains an ``error_code``.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before mutating: the API token rides inside the parameters.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # External transport: feed the body to curl on stdin ("-d @-").
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin transport.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert unicode strings in the decoded JSON back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
319 319
320 320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.

    Mainly useful for debugging the Conduit transport and for test recording
    via ``--test-vcr``.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
344 344
345 345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign to a PHID through the Conduit search API.
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if len(data) == 0:
        return None
    phid = data[0][b'phid']
    # Cache the answer in-memory so later calls skip the round-trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
365 365
366 366
# Matches an entire local tag name of the form "D123".
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url>Dnnn" line in a commit message.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
371 371
372 372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    # The diff residue in the dump kept both the removed "nodemap =" line and
    # this committed replacement; only the index API is used now.
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Drop the stale local tag: it points at history that does
                # not match what Phabricator knows about this revision.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
463 463
464 464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
473 473
474 474
class DiffChangeType(object):
    """Differential change ``type`` constants (mirrors Phabricator's enum)."""

    (
        ADD,
        CHANGE,
        DELETE,
        MOVE_AWAY,
        COPY_AWAY,
        MOVE_HERE,
        COPY_HERE,
        MULTICOPY,
    ) = range(1, 9)
484 484
485 485
class DiffFileType(object):
    """Differential file-type constants (mirrors Phabricator's enum)."""

    TEXT, IMAGE, BINARY = range(1, 4)
490 490
491 491
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    Attribute names marked "camelcase-required" must keep their camelCase
    spelling: they are serialized verbatim for the Conduit API.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # Hunk body, without the "@@ ..." range line.
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
505 505
506 506
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.

    Attribute names marked "camelcase-required" are serialized verbatim for
    the Conduit API.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:*`` metadata entry under its ``old:*`` key."""
        # list() because we add keys to self.metadata while iterating.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the parent revision's unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new revision's unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
546 546
547 547
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Attribute names marked "camelcase-required" are serialized verbatim for
    the Conduit API.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: serialized phabchange}
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for this diff, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
574 574
575 575
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``ctx.p1()`` and ``ctx`` (git-style) and appends
    one phabhunk per hunk to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Huge context so a single hunk effectively carries the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the first line of the hunk; the offsets it encodes are passed
        # to phabhunk explicitly.
        corpus = b''.join(lines[1:])
        # Rebuild a full diff text to compute add/del line statistics.
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
604 604
605 605
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID from an earlier ``file.allocate`` call; the
    server reports which byte ranges it still needs.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip ranges the server already holds.
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
631 631
632 632
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID; raises ``error.Abort`` if no PHID could be
    obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID yet: upload the whole content in one file.upload call.
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # A PHID together with upload=True means the server wants chunks.
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
668 668
669 669
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``originalfname`` is the file's path in the parent revision; it differs
    from ``fctx.path()`` for renames/copies.
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
691 691
692 692
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        guessed = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = guessed
        # Images get their own type so the web UI renders a preview.
        if guessed.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
705 705
706 706
# Copied from mercurial/patch.py
# Maps hg file flags to git file modes: 'l' symlink, 'x' executable,
# '' regular file.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
709 709
710 710
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Both the current content and (when present) the parent's content
        # must decode cleanly for the file to count as text.
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
726 726
727 727
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    parent = ctx.p1()
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        fctx = parent[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        # Binary / non-UTF-8 removals carry no text hunks.
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
740 740
741 741
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    parent = ctx.p1()
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[parent[fname].flags()]
        # Only record modes when they changed (e.g. +x added/removed).
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
760 760
761 761
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed`` in place: a rename's source file is taken out of it
    (it is reported as MOVE_AWAY here instead of as a plain delete).
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        # renamed() gives (source path, source filenode) for copies/renames.
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source is gone: this is a move, not a copy.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Another copy of a file that already moved away.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The *_AWAY counterpart changes are registered after all adds so their
    # awayPaths lists are complete.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
825 825
826 826
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff from the status between ``ctx`` and its first parent
    and submits it via the ``differential.creatediff`` API.  Returns the
    resulting diff dict; raises ``error.Abort`` on an empty response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
852 852
853 853
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    # "hg:meta": flat commit metadata.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits": per-node commit details keyed by hex node.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ctx.repo().ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(localcommits),
        },
    )
890 890
891 891
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair; raises ``error.Abort`` when the
    server returns an empty revision.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare raw patch text to decide whether a new diff upload is
        # needed at all.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
961 961
962 962
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts when any name cannot be resolved: ``user.search`` silently drops
    unknown usernames instead of erroring, so the result is diffed against
    the request here.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    entries = result[b'data']
    # username not found is not an error of the API, so check if we have
    # missed some names here
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
978 978
979 979
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line ORs with the [phabsend] confirm config
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the reviewer transactions shared by every revision in the stack;
    # blocking reviewers are expressed with the blocking(PHID) syntax.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per revision: "D123 - created - 1:abc: summary"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite parents through "mapping" so an amended parent's
                    # replacement is used instead of the obsoleted node.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1187 1187
1188 1188
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # the trailing space is significant: "# Parent " is the exact header
        # "hg export" emits and "hg import" parses
        (b'parent', b'Parent '),
    ]
)
1200 1200
1201 1201
def _confirmbeforesend(repo, revs, oldmap):
    """Show what phabsend is about to do and prompt for confirmation.

    Prints one line per revision (existing D-number or NEW, node, first
    description line), then asks the user. Returns True to proceed, False
    when the user declines.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # A known mapping means an update of D<drevid>; otherwise a new
        # Differential Revision will be created.
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        firstline = ctx.description().splitlines()[0]
        summarydesc = ui.label(firstline, b'phabricator.desc')

        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, summarydesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns the 0-based choice index; 0 is "Yes"
    return ui.promptchoice(prompt) == 0
1229 1229
1230 1230
# Status names (normalized: lowercased, spaces removed — see _getstatusname)
# that may appear as bare symbols in a DREVSPEC query to filter by status.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1238 1238
1239 1239
1240 1240 def _getstatusname(drev):
1241 1241 """get normalized status name from a Differential Revision"""
1242 1242 return drev[b'statusName'].replace(b' ', b'').lower()
1243 1243
1244 1244
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    # ':' is a prefix operator selecting a revision's stack of ancestors
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1259 1259
1260 1260
1261 1261 def _tokenize(text):
1262 1262 view = memoryview(text) # zero-copy slice
1263 1263 special = b'():+-& '
1264 1264 pos = 0
1265 1265 length = len(text)
1266 1266 while pos < length:
1267 1267 symbol = b''.join(
1268 1268 itertools.takewhile(
1269 1269 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1270 1270 )
1271 1271 )
1272 1272 if symbol:
1273 1273 yield (b'symbol', symbol, pos)
1274 1274 pos += len(symbol)
1275 1275 else: # special char, ignore space
1276 1276 if text[pos : pos + 1] != b' ':
1277 1277 yield (text[pos : pos + 1], None, pos)
1278 1278 pos += 1
1279 1279 yield (b'end', None, pos)
1280 1280
1281 1281
def _parse(text):
    """Parse a DREVSPEC bytestring into a syntax tree.

    Raises ParseError when tokens remain after a complete expression
    (e.g. b'D1 D2').
    """
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1287 1287
1288 1288
1289 1289 def _parsedrev(symbol):
1290 1290 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1291 1291 if symbol.startswith(b'D') and symbol[1:].isdigit():
1292 1292 return int(symbol[1:])
1293 1293 if symbol.isdigit():
1294 1294 return int(symbol)
1295 1295
1296 1296
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Recursively walks a parsed DREVSPEC tree: the first set holds drev ids
    named directly; the second holds ids whose ancestor stacks are requested
    via the ``:`` prefix operator.
    """
    direct = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            direct.add(drev)
    elif kind == b'ancestors':
        subdirect, subanc = _prefetchdrevs(tree[1])
        direct |= subdirect
        # every id named under ':' is also a stack root to expand
        ancestors |= subdirect
        ancestors |= subanc
    else:
        # binary operators and groups: merge results from all children
        for child in tree[1:]:
            subdirect, subanc = _prefetchdrevs(child)
            direct |= subdirect
            ancestors |= subanc
    return direct, ancestors
1317 1317
1318 1318
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None

        Checks the "prefetched" cache first; on a miss, issues a
        differential.query call and caches every returned drev under both
        its PHID and its integer id.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Follows "phabricator:depends-on" links depth-first, deduplicating
        with "visited", and returns ids ordered bottom (dependency) first.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache, shared by fetch()/getstack() above
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor query,
    # speculatively grab a window of "batchsize" preceding ids in one call,
    # betting that stacks consist of consecutively-numbered revisions.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status names filter the prefetched universe (validids)
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # smartsets overload &, + and - to set operations
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1441 1441
1442 1442
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    pieces = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        pieces.append(b'Test Plan:\n%s' % plan)
    pieces.append(b'Differential Revision: %s' % drev[b'uri'])
    # empty sections are dropped rather than leaving blank paragraphs
    return b'\n\n'.join(piece for piece in pieces if piece)
1456 1456
1457 1457
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Multiple commits may be attached; pick deterministically by
            # node hash. Sorting the commit dicts themselves (as this used
            # to do) raises TypeError on Python 3 when there is more than
            # one entry, since dicts are not orderable.
            commit = sorted(
                props[b'local:commits'].values(),
                key=lambda c: c.get(b'commit', c.get(b'rev', b'')),
            )[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # only a unix timestamp is available; time zone is lost
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1524 1524
1525 1525
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Latest diff id per drev; prefetch all hg:meta properties in a single
    # differential.querydiffs call instead of one round-trip per revision.
    latestdiffid = {
        drev[b'phid']: max(int(v) for v in drev[b'diffs']) for drev in drevs
    }
    diffs = callconduit(
        ui,
        b'differential.querydiffs',
        {b'ids': sorted(set(latestdiffid.values()))},
    )

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = latestdiffid[drev[b'phid']]
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})

        # Build hg patch headers that the "import" command understands; see
        # patchheadermap and extract in mercurial/patch.py.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))

        content = b'%s%s\n%s' % (
            b''.join(headerlines),
            getdescfromdrev(drev),
            body,
        )
        write(content)
1557 1557
1558 1558
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # wrap the spec in ':(...)' so the whole dependency stack is selected
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1589 1589
1590 1590
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # at most one status-changing flag may be used at a time
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # the --comment text is only attached to the last selected revision
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1627 1627
1628 1628
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision: <url>" line amended into
    # the commit message by phabsend.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    else:
        # Fall back to a local "D123" tag (created when phabsend ran without
        # --amend) and rebuild the URL from the configured Phabricator base.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now