##// END OF EJS Templates
phabricator: treat non-utf-8 text files as binary as phabricator requires...
Ian Moody -
r43557:06a33a50 default
parent child Browse files
Show More
@@ -1,1622 +1,1639 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 46 import hashlib
47 47 import itertools
48 48 import json
49 49 import mimetypes
50 50 import operator
51 51 import re
52 52
53 53 from mercurial.node import bin, nullid
54 54 from mercurial.i18n import _
55 55 from mercurial.pycompat import getattr
56 56 from mercurial.thirdparty import attr
57 57 from mercurial import (
58 58 cmdutil,
59 59 context,
60 60 encoding,
61 61 error,
62 62 exthelper,
63 63 httpconnection as httpconnectionmod,
64 64 match,
65 65 mdiff,
66 66 obsutil,
67 67 parser,
68 68 patch,
69 69 phases,
70 70 pycompat,
71 71 scmutil,
72 72 smartset,
73 73 tags,
74 74 templatefilters,
75 75 templateutil,
76 76 url as urlmod,
77 77 util,
78 78 )
79 79 from mercurial.utils import (
80 80 procutil,
81 81 stringutil,
82 82 )
83 83
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Helper that collects the command/config/templatekeyword registrations;
# the tables below are re-exported so Mercurial's extension loader finds
# them under their conventional names.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
# Repository callsign on the Phabricator side (see module docstring).
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
# Optional external curl command used instead of the builtin HTTP library.
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
# Base URL of the Phabricator instance.
eh.configitem(
    b'phabricator', b'url', default=None,
)
# Ask for confirmation before phabsend uploads anything.
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color labels used for phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}
126 126
# Extra command-line flag appended to every @vcrcommand so tests can record
# or replay HTTP traffic with the (optional) third-party "vcr" package.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139 139
140 140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Works like ``@command`` but appends ``_VCR_FLAGS`` to ``flags``.  When
    the command is invoked with ``--test-vcr PATH``, all HTTP requests are
    recorded to (or replayed from) the cassette file at PATH using the
    third-party ``vcr`` package; without the flag the command runs normally.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Conduit parameters are form-encoded; compare them as sets so
        # parameter ordering differences do not break cassette matching.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Scrub real API tokens out of recorded request bodies.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies could carry session credentials; drop them before saving.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr and its stubs are imported with demandimport disabled
                # so their module-level patching works.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/debug output.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
205 205
206 206
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def flatten(prefix, value):
        # PHP form encoding spells booleans as the strings below.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type checks on purpose: subclasses (e.g. sortdict) are
        # treated as scalars, matching the original dispatch-by-type table.
        kind = type(value)
        if kind is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif kind is dict:
            pairs = list(value.items())
        else:
            flat[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flat)
232 232
233 233
def readurltoken(ui):
    """Return the configured (conduit url, API token) pair.

    Aborts when either phabricator.url or a matching [auth] token is
    missing.  Currently read from the [auth] config section; in the future
    it might make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        groupname, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if token:
        return url, token

    raise error.Abort(
        _(b'Can\'t find conduit token associated to %s') % (url,)
    )
262 262
263 263
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before mutating: the API token rides along inside the
    # __conduit__ parameter rather than an HTTP header.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the user-configured curl, feeding the form data on
        # stdin (-d @-).
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # Convert every unicode string in the decoded JSON back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
307 307
308 308
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings; convert them to local bytes
    # before handing the params to callconduit.
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings; convert the (bytes) result
    # back before serializing it for output.
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
332 332
333 333
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    cached = repo.ui.config(b'phabricator', b'repophid')
    if cached:
        return cached

    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # Resolve the callsign through the API, then remember the answer so the
    # lookup is only paid for once per process.
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
353 353
354 354
# Matches a local tag of the form "D123" marking an already-submitted node.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches the "Differential Revision: <url>D123" line in a commit message.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
359 359
360 360
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this ``continue`` is the last
                        # statement of the innermost loop, so it is a no-op;
                        # it looks like it was intended to move on to the
                        # next node -- confirm before changing behavior.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: trust the commit-message association even when the
            # precursor sets do not overlap in the check below.
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Resolve the node recorded in a diff's metadata, if any.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Re-tag to nullid locally, effectively removing the stale
                # "D123" tag that does not match Differential history.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
451 451
452 452
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # Labels are only for terminal colorization; drop them here.
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
461 461
462 462
class DiffChangeType(object):
    # Integer values sent in phabchange.type, describing what happened to a
    # file: plain add/change/delete plus the move/copy source ("AWAY") and
    # destination ("HERE") variants used by addadded().
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
472 472
473 473
class DiffFileType(object):
    # Integer values sent in phabchange.fileType; IMAGE and BINARY file
    # contents are uploaded separately (see makebinary/uploadfile) instead
    # of being inlined as text hunks.
    TEXT = 1
    IMAGE = 2
    BINARY = 3
478 478
479 479
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body.  Default to bytes for consistency with the rest of the
    # module, which works exclusively in bytes; the previous str '' default
    # would leak a unicode object into the otherwise all-bytes payload when
    # no corpus is supplied (maketext always supplies one).
    corpus = attr.ib(default=b'')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
493 493
494 494
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every metadata key with its 'new:' prefix as 'old:'."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the parent revision's unix file mode.
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record this revision's unix file mode.
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk (as a plain dict) and fold in its line counts."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
534 534
535 535
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps currentPath -> plain-dict form of a phabchange.
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its currentPath."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
562 562
563 563
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # git-style diff with a very large context window.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # presumably lines[0] is the '@@' hunk header, skipped here -- the
        # corpus is everything after it; verify against patch.diffhunks.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Count added/deleted lines for this hunk via the diffstat helpers.
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
592 592
593 593
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        # The server reports which byte ranges it already holds; skip those.
        if chunk[b'complete']:
            continue
        start, end = int(chunk[b'byteStart']), int(chunk[b'byteEnd'])
        params = {
            b'filePHID': fphid,
            b'byteStart': start,
            b'data': base64.b64encode(fctx.data()[start:end]),
            b'dataEncoding': b'base64',
        }
        callconduit(ui, b'file.uploadchunk', params)
    progress.complete()
620 620
621 621
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID from allocate means the file is large enough that the
            # server expects it in chunks.
            uploadchunks(fctx, fphid)
        else:
            # Small file: a single upload call, which returns the PHID.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
657 657
658 658
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        # Old content is uploaded as its own file object.
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
680 680
681 681
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimetype, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimetype:
        mimetype = pycompat.bytestr(mimetype)
        pchange.metadata[b'new:file:mime-type'] = mimetype
        # Images get their own file type so the web UI can show a preview.
        if mimetype.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
694 694
695 695
# Copied from mercurial/patch.py
# Maps a context flag character (symlink, executable, regular) to the
# git-style file mode string.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
698 698
699 699
def notutf8(fctx):
    """Report (and warn about) text files that are not valid UTF-8.

    Phabricator requires such files to be marked as binary, so callers use
    this check alongside ``fctx.isbinary()``.
    """
    try:
        fctx.data().decode('utf-8')
        # Also validate the parent revision's content when there is one.
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
715
716
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    parent = ctx.p1()
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        fctx = parent[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        # Text content is only attached for files we can diff as text;
        # binary and non-UTF-8 files are sent without hunks.
        if not fctx.isbinary() and not notutf8(fctx):
            maketext(pchange, ctx, fname)
        pdiff.addchange(pchange)
713 730
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[ctx[fname].flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only record the modes when they actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)
        else:
            # Binary (or non-UTF-8, which Phabricator requires us to treat
            # as binary) files are uploaded separately, old and new sides.
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)

        pdiff.addchange(pchange)
732 749
733 750
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source file is gone, so this is a move.  Mutating the
                # caller's ``removed`` list is what keeps addremoved() from
                # also emitting the file.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # An additional copy of a file that was already moved away.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Non-UTF-8 text must be treated as binary, per Phabricator.
        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The AWAY-side records are emitted once per source file, after all of
    # their destinations have been collected.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
797 814
798 815
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    repophid = getrepophid(repo)
    if repophid:
        pdiff.repositoryPHID = repophid

    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
824 841
825 842
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    def setprop(name, data):
        # One conduit call per diff property.
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )

    setprop(
        b'hg:meta',
        {
            b'user': ctx.user(),
            b'date': b'%d %d' % ctx.date(),
            b'branch': ctx.branch(),
            b'node': ctx.hex(),
            b'parent': ctx.p1().hex(),
        },
    )

    setprop(
        b'local:commits',
        {
            ctx.hex(): {
                b'author': stringutil.person(ctx.user()),
                b'authorEmail': stringutil.email(ctx.user()),
                b'time': int(ctx.date()[0]),
                b'commit': ctx.hex(),
                b'parents': [ctx.p1().hex()],
                b'branch': ctx.branch(),
            },
        },
    )
862 879
863 880
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of the old and new nodes to decide
        # whether a new Differential diff needs to be uploaded at all.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
933 950
934 951
def userphids(repo, names):
    """Resolve each user name in ``names`` to its PHID.

    Matching is case-insensitive.  Aborts when any name is unknown to the
    server, since ``user.search`` silently omits unmatched names instead of
    erroring.
    """
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    # username not found is not an error of the API, so detect missing
    # names by comparing the response with what was requested.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
950 967
951 968
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Confirmation is triggered by either the config knob or the CLI flag.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker options become a single reviewers.add transaction;
    # blocking reviewers use Phabricator's "blocking(PHID)" syntax.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One summary line per changeset: "D123 - created - 1:abc: desc"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Re-parent onto the already-amended predecessors so the
                    # rewritten stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1159 1176
1160 1177
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space is deliberate: readpatch() joins with a single
        # space, yielding "# Parent  <node>" — presumably to match the
        # double-space alignment of "hg export" headers (confirm).
        (b'parent', b'Parent '),
    ]
)
1172 1189
1173 1190
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per revision and prompt for confirmation.

    Returns True when the user answers Yes, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known Differential Revisions show their number; new ones show NEW
        if drevid:
            drevlabel = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevlabel = ui.label(_(b'NEW'), b'phabricator.drev')
        nodelabel = ui.label(bytes(ctx), b'phabricator.node')
        desclabel = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevlabel, nodelabel, desclabel))

    # promptchoice() returns the index of the chosen answer; 0 means Yes
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1201 1218
1202 1219
# Status names accepted by the DREVSPEC query language, in the normalized
# form (lowercase, spaces removed) produced by _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1210 1227
1211 1228
1212 1229 def _getstatusname(drev):
1213 1230 """get normalized status name from a Differential Revision"""
1214 1231 return drev[b'statusName'].replace(b' ', b'').lower()
1215 1232
1216 1233
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This table drives Mercurial's generic Pratt parser (parser.parser, used
# by _parse below).  Each entry is
#   token-type: (binding-strength, primary, prefix, infix, suffix)
# where prefix/infix give the AST node name and the binding strength used
# when parsing the operand(s).

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1231 1248
1232 1249
def _tokenize(text):
    """Tokenize a DREVSPEC bytestring into ``(type, value, pos)`` tuples.

    Token types are b'symbol' (value is the symbol bytes), one of the
    single special characters in b'():+-&' (value is None), and a final
    b'end' marker.  Spaces separate tokens and are otherwise dropped.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # longest run of non-special bytes starting at pos
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice rather than index: on Python 3, text[pos] is an int,
            # which never compares equal to b' ' (so spaces leak through)
            # and would be yielded as an int token the parser table cannot
            # look up.  text[pos:pos + 1] is a length-1 bytes on py2 and py3.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1252 1269
1253 1270
def _parse(text):
    """Parse a DREVSPEC bytestring into a query tree.

    Raises ParseError unless the whole input is consumed.
    """
    drevparser = parser.parser(_elements)
    tree, endpos = drevparser.parse(_tokenize(text))
    if endpos != len(text):
        raise error.ParseError(b'invalid token', endpos)
    return tree
1259 1276
1260 1277
1261 1278 def _parsedrev(symbol):
1262 1279 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1263 1280 if symbol.startswith(b'D') and symbol[1:].isdigit():
1264 1281 return int(symbol[1:])
1265 1282 if symbol.isdigit():
1266 1283 return int(symbol)
1267 1284
1268 1285
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Walks the parsed DREVSPEC tree.  Ids found under an ``ancestors``
    (``:``) operator are recorded in both sets so whole stacks can be
    fetched in batch.
    """
    drevs, ancestordrevs = set(), set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:  # non-numeric symbols (e.g. status names) parse to None
            drevs.add(drev)
    elif op == b'ancestors':
        sub, subancestors = _prefetchdrevs(tree[1])
        drevs |= sub
        ancestordrevs |= sub | subancestors
    else:
        for subtree in tree[1:]:
            sub, subancestors = _prefetchdrevs(subtree)
            drevs |= sub
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
1289 1306
1290 1307
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None

        Serves from the ``prefetched`` cache when possible; otherwise hits
        "differential.query" and caches every returned drev under both its
        PHID and its integer id.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Follows "phabricator:depends-on" links depth-first, then reverses
        so the result is ordered bottom (oldest dependency) to top.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch; for each ancestor query,
    # speculatively fetch a window of ``batchsize`` ids below it since a
    # stack's ids are usually close together.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status filters only select among already-resolved ids,
                # hence they cannot be used alone.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Set algebra is delegated to smartset's &, + and - operators
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1413 1430
1414 1431
def getdescfromdrev(drev):
    """Rebuild a commit message from a "Differential Revision" dict.

    Similar to the differential.getcommitmessage API, but only the title,
    summary, test plan and URL fields are used.  Empty sections are
    dropped; the remaining ones are joined by blank lines.
    """
    sections = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(section for section in sections if section)
1428 1445
1429 1446
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", which carries
    "author"/"authorEmail"/"time"/"branch"/"commit"/"parents" per commit
    (time zone information is lost in that conversion).  Whatever is still
    missing afterwards is filled from the diff object's own "dateCreated",
    "branch" and "sourceControlBaseRevision" fields.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting plain dicts raises TypeError on
            # Python 3 when there is more than one commit; presumably arc
            # attaches a single commit per diff -- confirm.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # local:commits has no tz offset, so assume UTC ("0")
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            parents = commit.get(b'parents', ())
            if parents:
                meta[b'parent'] = parents[0]
    # Fill anything still missing from the diff object itself
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1496 1513
1497 1514
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    ``write`` is usually ui.write; ``drevs`` is what querydrev() returns,
    i.e. results of "differential.query".
    """
    # Prefetch the latest diff of every drev in a single conduit call
    latestdiffids = sorted(
        set(max(int(v) for v in drev[b'diffs']) for drev in drevs)
    )
    diffs = callconduit(
        repo.ui, b'differential.querydiffs', {b'ids': latestdiffids}
    )

    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from the hg:meta property. Write hg
        # patch headers that can be read by the "import" command. See
        # patchheadermap and extract in mercurial/patch.py for supported
        # headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(
                    b'# %s %s\n' % (_metanamemap[key], meta[key])
                )
        header = b''.join(headerlines)

        write(b'%s%s\n%s' % (header, desc, body))
1529 1546
1530 1547
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for prefixing the spec with the ':' operator
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1561 1578
1562 1579
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': flag, b'value': b'true'} for flag in flags]

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # Attach the comment (if any) to the last selected revision only
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1599 1616
1600 1617
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line in the description
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    # Otherwise fall back to a local D<num> tag and rebuild the URL from
    # the configured Phabricator base URL
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + t, b'id': t,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now