phabricator: add the uploadfile function...
Ian Moody
r43458:24e8aac7 default
@@ -1,1419 +1,1457 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import base64
45 45 import contextlib
46 import hashlib
46 47 import itertools
47 48 import json
48 49 import operator
49 50 import re
50 51
51 52 from mercurial.node import bin, nullid
52 53 from mercurial.i18n import _
53 54 from mercurial.pycompat import getattr
54 55 from mercurial.thirdparty import attr
55 56 from mercurial import (
56 57 cmdutil,
57 58 context,
58 59 encoding,
59 60 error,
60 61 exthelper,
61 62 httpconnection as httpconnectionmod,
62 63 match,
63 64 mdiff,
64 65 obsutil,
65 66 parser,
66 67 patch,
67 68 phases,
68 69 pycompat,
69 70 scmutil,
70 71 smartset,
71 72 tags,
72 73 templatefilters,
73 74 templateutil,
74 75 url as urlmod,
75 76 util,
76 77 )
77 78 from mercurial.utils import (
78 79 procutil,
79 80 stringutil,
80 81 )
81 82
82 83 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
83 84 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
84 85 # be specifying the version(s) of Mercurial they are tested with, or
85 86 # leave the attribute unspecified.
86 87 testedwith = b'ships-with-hg-core'
87 88
88 89 eh = exthelper.exthelper()
89 90
90 91 cmdtable = eh.cmdtable
91 92 command = eh.command
92 93 configtable = eh.configtable
93 94 templatekeyword = eh.templatekeyword
94 95
95 96 # developer config: phabricator.batchsize
96 97 eh.configitem(
97 98 b'phabricator', b'batchsize', default=12,
98 99 )
99 100 eh.configitem(
100 101 b'phabricator', b'callsign', default=None,
101 102 )
102 103 eh.configitem(
103 104 b'phabricator', b'curlcmd', default=None,
104 105 )
105 106 # developer config: phabricator.repophid
106 107 eh.configitem(
107 108 b'phabricator', b'repophid', default=None,
108 109 )
109 110 eh.configitem(
110 111 b'phabricator', b'url', default=None,
111 112 )
112 113 eh.configitem(
113 114 b'phabsend', b'confirm', default=False,
114 115 )
115 116
116 117 colortable = {
117 118 b'phabricator.action.created': b'green',
118 119 b'phabricator.action.skipped': b'magenta',
119 120 b'phabricator.action.updated': b'magenta',
120 121 b'phabricator.desc': b'',
121 122 b'phabricator.drev': b'bold',
122 123 b'phabricator.node': b'',
123 124 }
124 125
125 126 _VCR_FLAGS = [
126 127 (
127 128 b'',
128 129 b'test-vcr',
129 130 b'',
130 131 _(
131 132 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
132 133 b', otherwise will mock all http requests using the specified vcr file.'
133 134 b' (ADVANCED)'
134 135 ),
135 136 ),
136 137 ]
137 138
138 139
139 140 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
140 141 fullflags = flags + _VCR_FLAGS
141 142
142 143 def hgmatcher(r1, r2):
143 144 if r1.uri != r2.uri or r1.method != r2.method:
144 145 return False
145 146 r1params = r1.body.split(b'&')
146 147 r2params = r2.body.split(b'&')
147 148 return set(r1params) == set(r2params)
148 149
149 150 def sanitiserequest(request):
150 151 request.body = re.sub(
151 152 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
152 153 )
153 154 return request
154 155
155 156 def sanitiseresponse(response):
156 157 if r'set-cookie' in response[r'headers']:
157 158 del response[r'headers'][r'set-cookie']
158 159 return response
159 160
160 161 def decorate(fn):
161 162 def inner(*args, **kwargs):
162 163 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
163 164 if cassette:
164 165 import hgdemandimport
165 166
166 167 with hgdemandimport.deactivated():
167 168 import vcr as vcrmod
168 169 import vcr.stubs as stubs
169 170
170 171 vcr = vcrmod.VCR(
171 172 serializer=r'json',
172 173 before_record_request=sanitiserequest,
173 174 before_record_response=sanitiseresponse,
174 175 custom_patches=[
175 176 (
176 177 urlmod,
177 178 r'httpconnection',
178 179 stubs.VCRHTTPConnection,
179 180 ),
180 181 (
181 182 urlmod,
182 183 r'httpsconnection',
183 184 stubs.VCRHTTPSConnection,
184 185 ),
185 186 ],
186 187 )
187 188 vcr.register_matcher(r'hgmatcher', hgmatcher)
188 189 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
189 190 return fn(*args, **kwargs)
190 191 return fn(*args, **kwargs)
191 192
192 193 inner.__name__ = fn.__name__
193 194 inner.__doc__ = fn.__doc__
194 195 return command(
195 196 name,
196 197 fullflags,
197 198 spec,
198 199 helpcategory=helpcategory,
199 200 optionalrepo=optionalrepo,
200 201 )(inner)
201 202
202 203 return decorate
203 204
204 205
205 206 def urlencodenested(params):
206 207 """like urlencode, but works with nested parameters.
207 208
208 209 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
209 210 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
210 211 urlencode. Note: the encoding is consistent with PHP's http_build_query.
211 212 """
212 213 flatparams = util.sortdict()
213 214
214 215 def process(prefix, obj):
215 216 if isinstance(obj, bool):
216 217 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
217 218 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
218 219 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
219 220 if items is None:
220 221 flatparams[prefix] = obj
221 222 else:
222 223 for k, v in items(obj):
223 224 if prefix:
224 225 process(b'%s[%s]' % (prefix, k), v)
225 226 else:
226 227 process(k, v)
227 228
228 229 process(b'', params)
229 230 return util.urlreq.urlencode(flatparams)
230 231
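# A minimal sketch of the flattening, using the same parameter shape that
# getrepophid() below sends to diffusion.repository.search (the callsign is
# just the one from the example config at the top of this file):
#
#   urlencodenested({b'constraints': {b'callsigns': [b'FOO']}})
#   # is the urlencoding of {b'constraints[callsigns][0]': b'FOO'};
#   # Python booleans are first mapped to b'true'/b'false' (PHP form).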
231 232
232 233 def readurltoken(ui):
233 234 """return conduit url, token and make sure they exist
234 235
235 236 Currently read from [auth] config section. In the future, it might
236 237 make sense to read from .arcconfig and .arcrc as well.
237 238 """
238 239 url = ui.config(b'phabricator', b'url')
239 240 if not url:
240 241 raise error.Abort(
241 242 _(b'config %s.%s is required') % (b'phabricator', b'url')
242 243 )
243 244
244 245 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
245 246 token = None
246 247
247 248 if res:
248 249 group, auth = res
249 250
250 251 ui.debug(b"using auth.%s.* for authentication\n" % group)
251 252
252 253 token = auth.get(b'phabtoken')
253 254
254 255 if not token:
255 256 raise error.Abort(
256 257 _(b'Can\'t find conduit token associated to %s') % (url,)
257 258 )
258 259
259 260 return url, token
260 261
261 262
262 263 def callconduit(ui, name, params):
263 264 """call Conduit API, params is a dict. return json.loads result, or None"""
264 265 host, token = readurltoken(ui)
265 266 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
266 267 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
267 268 params = params.copy()
268 269 params[b'api.token'] = token
269 270 data = urlencodenested(params)
270 271 curlcmd = ui.config(b'phabricator', b'curlcmd')
271 272 if curlcmd:
272 273 sin, sout = procutil.popen2(
273 274 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
274 275 )
275 276 sin.write(data)
276 277 sin.close()
277 278 body = sout.read()
278 279 else:
279 280 urlopener = urlmod.opener(ui, authinfo)
280 281 request = util.urlreq.request(pycompat.strurl(url), data=data)
281 282 with contextlib.closing(urlopener.open(request)) as rsp:
282 283 body = rsp.read()
283 284 ui.debug(b'Conduit Response: %s\n' % body)
284 285 parsed = pycompat.rapply(
285 286 lambda x: encoding.unitolocal(x)
286 287 if isinstance(x, pycompat.unicode)
287 288 else x,
288 289 # json.loads only accepts bytes from py3.6+
289 290 json.loads(encoding.unifromlocal(body)),
290 291 )
291 292 if parsed.get(b'error_code'):
292 293 msg = _(b'Conduit Error (%s): %s') % (
293 294 parsed[b'error_code'],
294 295 parsed[b'error_info'],
295 296 )
296 297 raise error.Abort(msg)
297 298 return parsed[b'result']
298 299
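# A minimal sketch of a Conduit call, mirroring the user.search query that
# userphids() below builds (the username is a placeholder, not a real user):
#
#   result = callconduit(ui, b'user.search',
#                        {b'constraints': {b'usernames': [b'alice']}})
#   phids = [entry[b'phid'] for entry in result[b'data']]
#
# A response carrying a non-zero error_code raises error.Abort instead of
# returning a result.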
299 300
300 301 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
301 302 def debugcallconduit(ui, repo, name):
302 303 """call Conduit API
303 304
304 305 Call parameters are read from stdin as a JSON blob. Result will be written
305 306 to stdout as a JSON blob.
306 307 """
307 308 # json.loads only accepts bytes from 3.6+
308 309 rawparams = encoding.unifromlocal(ui.fin.read())
309 310 # json.loads only returns unicode strings
310 311 params = pycompat.rapply(
311 312 lambda x: encoding.unitolocal(x)
312 313 if isinstance(x, pycompat.unicode)
313 314 else x,
314 315 json.loads(rawparams),
315 316 )
316 317 # json.dumps only accepts unicode strings
317 318 result = pycompat.rapply(
318 319 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
319 320 callconduit(ui, name, params),
320 321 )
321 322 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
322 323 ui.write(b'%s\n' % encoding.unitolocal(s))
323 324
324 325
325 326 def getrepophid(repo):
326 327 """given callsign, return repository PHID or None"""
327 328 # developer config: phabricator.repophid
328 329 repophid = repo.ui.config(b'phabricator', b'repophid')
329 330 if repophid:
330 331 return repophid
331 332 callsign = repo.ui.config(b'phabricator', b'callsign')
332 333 if not callsign:
333 334 return None
334 335 query = callconduit(
335 336 repo.ui,
336 337 b'diffusion.repository.search',
337 338 {b'constraints': {b'callsigns': [callsign]}},
338 339 )
339 340 if len(query[b'data']) == 0:
340 341 return None
341 342 repophid = query[b'data'][0][b'phid']
342 343 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
343 344 return repophid
344 345
345 346
346 347 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
347 348 _differentialrevisiondescre = re.compile(
348 349 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
349 350 )
350 351
351 352
352 353 def getoldnodedrevmap(repo, nodelist):
353 354 """find previous nodes that has been sent to Phabricator
354 355
355 356 return {node: (oldnode, Differential diff, Differential Revision ID)}
356 357 for node in nodelist with known previously sent versions, or associated
357 358 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
358 359 be ``None``.
359 360
360 361 Examines commit messages like "Differential Revision:" to get the
361 362 association information.
362 363
363 364 If no such commit message line is found, examines all precursors and their
364 365 tags. Tags in a format like "D1234" are considered a match, and the node
365 366 with that tag and the number after "D" (e.g. 1234) will be returned.
366 367
367 368 The ``old node``, if not None, is guaranteed to be the last diff of the
368 369 corresponding Differential Revision, and to exist in the repo.
369 370 """
370 371 unfi = repo.unfiltered()
371 372 nodemap = unfi.changelog.nodemap
372 373
373 374 result = {} # {node: (oldnode?, lastdiff?, drev)}
374 375 toconfirm = {} # {node: (force, {precnode}, drev)}
375 376 for node in nodelist:
376 377 ctx = unfi[node]
377 378 # For tags like "D123", put them into "toconfirm" to verify later
378 379 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
379 380 for n in precnodes:
380 381 if n in nodemap:
381 382 for tag in unfi.nodetags(n):
382 383 m = _differentialrevisiontagre.match(tag)
383 384 if m:
384 385 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
385 386 continue
386 387
387 388 # Check commit message
388 389 m = _differentialrevisiondescre.search(ctx.description())
389 390 if m:
390 391 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
391 392
392 393 # Double-check that the tags are genuine by collecting all old nodes from
393 394 # Phabricator, and expecting the precursors to overlap with them.
394 395 if toconfirm:
395 396 drevs = [drev for force, precs, drev in toconfirm.values()]
396 397 alldiffs = callconduit(
397 398 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
398 399 )
399 400 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
400 401 for newnode, (force, precset, drev) in toconfirm.items():
401 402 diffs = [
402 403 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
403 404 ]
404 405
405 406 # "precursors" as known by Phabricator
406 407 phprecset = set(getnode(d) for d in diffs)
407 408
408 409 # Ignore if precursors (Phabricator and local repo) do not overlap,
409 410 # and force is not set (when commit message says nothing)
410 411 if not force and not bool(phprecset & precset):
411 412 tagname = b'D%d' % drev
412 413 tags.tag(
413 414 repo,
414 415 tagname,
415 416 nullid,
416 417 message=None,
417 418 user=None,
418 419 date=None,
419 420 local=True,
420 421 )
421 422 unfi.ui.warn(
422 423 _(
423 424 b'D%s: local tag removed - does not match '
424 425 b'Differential history\n'
425 426 )
426 427 % drev
427 428 )
428 429 continue
429 430
430 431 # Find the last node using Phabricator metadata, and make sure it
431 432 # exists in the repo
432 433 oldnode = lastdiff = None
433 434 if diffs:
434 435 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
435 436 oldnode = getnode(lastdiff)
436 437 if oldnode and oldnode not in nodemap:
437 438 oldnode = None
438 439
439 440 result[newnode] = (oldnode, lastdiff, drev)
440 441
441 442 return result
442 443
443 444
444 445 def getdiff(ctx, diffopts):
445 446 """plain-text diff without header (user, commit message, etc)"""
446 447 output = util.stringio()
447 448 for chunk, _label in patch.diffui(
448 449 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
449 450 ):
450 451 output.write(chunk)
451 452 return output.getvalue()
452 453
453 454
454 455 class DiffChangeType(object):
455 456 ADD = 1
456 457 CHANGE = 2
457 458 DELETE = 3
458 459 MOVE_AWAY = 4
459 460 COPY_AWAY = 5
460 461 MOVE_HERE = 6
461 462 COPY_HERE = 7
462 463 MULTICOPY = 8
463 464
464 465
465 466 class DiffFileType(object):
466 467 TEXT = 1
467 468 IMAGE = 2
468 469 BINARY = 3
469 470
470 471
471 472 @attr.s
472 473 class phabhunk(dict):
473 474 """Represents a Differential hunk, which is owned by a Differential change
474 475 """
475 476
476 477 oldOffset = attr.ib(default=0) # camelcase-required
477 478 oldLength = attr.ib(default=0) # camelcase-required
478 479 newOffset = attr.ib(default=0) # camelcase-required
479 480 newLength = attr.ib(default=0) # camelcase-required
480 481 corpus = attr.ib(default='')
481 482 # These get added to the phabchange's equivalents
482 483 addLines = attr.ib(default=0) # camelcase-required
483 484 delLines = attr.ib(default=0) # camelcase-required
484 485
485 486
486 487 @attr.s
487 488 class phabchange(object):
488 489 """Represents a Differential change, owns Differential hunks and owned by a
489 490 Differential diff. Each one represents one file in a diff.
490 491 """
491 492
492 493 currentPath = attr.ib(default=None) # camelcase-required
493 494 oldPath = attr.ib(default=None) # camelcase-required
494 495 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
495 496 metadata = attr.ib(default=attr.Factory(dict))
496 497 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
497 498 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
498 499 type = attr.ib(default=DiffChangeType.CHANGE)
499 500 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
500 501 commitHash = attr.ib(default=None) # camelcase-required
501 502 addLines = attr.ib(default=0) # camelcase-required
502 503 delLines = attr.ib(default=0) # camelcase-required
503 504 hunks = attr.ib(default=attr.Factory(list))
504 505
505 506 def copynewmetadatatoold(self):
506 507 for key in list(self.metadata.keys()):
507 508 newkey = key.replace(b'new:', b'old:')
508 509 self.metadata[newkey] = self.metadata[key]
509 510
510 511 def addoldmode(self, value):
511 512 self.oldProperties[b'unix:filemode'] = value
512 513
513 514 def addnewmode(self, value):
514 515 self.newProperties[b'unix:filemode'] = value
515 516
516 517 def addhunk(self, hunk):
517 518 if not isinstance(hunk, phabhunk):
518 519 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
519 520 self.hunks.append(hunk)
520 521 # It's useful to include these stats since the Phab web UI shows them,
521 522 # and uses them to estimate how large a change a Revision is. Also used
522 523 # in email subjects for the [+++--] bit.
523 524 self.addLines += hunk.addLines
524 525 self.delLines += hunk.delLines
525 526
526 527
527 528 @attr.s
528 529 class phabdiff(object):
529 530 """Represents a Differential diff, owns Differential changes. Corresponds
530 531 to a commit.
531 532 """
532 533
533 534 # Doesn't seem to be any reason to send this (output of uname -n)
534 535 sourceMachine = attr.ib(default=b'') # camelcase-required
535 536 sourcePath = attr.ib(default=b'/') # camelcase-required
536 537 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
537 538 sourceControlPath = attr.ib(default=b'/') # camelcase-required
538 539 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
539 540 branch = attr.ib(default=b'default')
540 541 bookmark = attr.ib(default=None)
541 542 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
542 543 lintStatus = attr.ib(default=b'none') # camelcase-required
543 544 unitStatus = attr.ib(default=b'none') # camelcase-required
544 545 changes = attr.ib(default=attr.Factory(dict))
545 546 repositoryPHID = attr.ib(default=None) # camelcase-required
546 547
547 548 def addchange(self, change):
548 549 if not isinstance(change, phabchange):
549 550 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
550 551 self.changes[change.currentPath] = change
551 552
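# A minimal sketch of how these three objects nest; the paths, offsets and
# corpus below are placeholder values (real hunks are built by maketext()
# just below, and a phabdiff corresponds to a single commit):
#
#   hunk = phabhunk(oldOffset=1, oldLength=1, newOffset=1, newLength=2,
#                   corpus=b'-old line\n+new line\n+another line\n',
#                   addLines=2, delLines=1)
#   pchange = phabchange(currentPath=b'example.txt', oldPath=b'example.txt')
#   pchange.addhunk(hunk)        # also accumulates addLines/delLines
#   pdiff = phabdiff(branch=b'default')
#   pdiff.addchange(pchange)     # keyed by currentPath in pdiff.changes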
552 553
553 554 def maketext(pchange, ctx, fname):
554 555 """populate the phabchange for a text file"""
555 556 repo = ctx.repo()
556 557 fmatcher = match.exact([fname])
557 558 diffopts = mdiff.diffopts(git=True, context=32767)
558 559 _pfctx, _fctx, header, fhunks = next(
559 560 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
560 561 )
561 562
562 563 for fhunk in fhunks:
563 564 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
564 565 corpus = b''.join(lines[1:])
565 566 shunk = list(header)
566 567 shunk.extend(lines)
567 568 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
568 569 patch.diffstatdata(util.iterlines(shunk))
569 570 )
570 571 pchange.addhunk(
571 572 phabhunk(
572 573 oldOffset,
573 574 oldLength,
574 575 newOffset,
575 576 newLength,
576 577 corpus,
577 578 addLines,
578 579 delLines,
579 580 )
580 581 )
581 582
582 583
583 584 def uploadchunks(fctx, fphid):
584 585 """upload large binary files as separate chunks.
585 586 Phab requests chunking for files over 8MiB, and splits them into 4MiB chunks
586 587 """
587 588 ui = fctx.repo().ui
588 589 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
589 590 progress = ui.makeprogress(
590 591 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
591 592 )
592 593 for chunk in chunks:
593 594 progress.increment()
594 595 if chunk[b'complete']:
595 596 continue
596 597 bstart = int(chunk[b'byteStart'])
597 598 bend = int(chunk[b'byteEnd'])
598 599 callconduit(
599 600 ui,
600 601 b'file.uploadchunk',
601 602 {
602 603 b'filePHID': fphid,
603 604 b'byteStart': bstart,
604 605 b'data': base64.b64encode(fctx.data()[bstart:bend]),
605 606 b'dataEncoding': b'base64',
606 607 },
607 608 )
608 609 progress.complete()
609 610
610 611
612 def uploadfile(fctx):
613 """upload binary files to Phabricator"""
614 repo = fctx.repo()
615 ui = repo.ui
616 fname = fctx.path()
617 size = fctx.size()
618 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
619
620 # an allocate call is required first to see if an upload is even required
621 # (Phab might already have it) and to determine if chunking is needed
622 allocateparams = {
623 b'name': fname,
624 b'contentLength': size,
625 b'contentHash': fhash,
626 }
627 filealloc = callconduit(ui, b'file.allocate', allocateparams)
628 fphid = filealloc[b'filePHID']
629
630 if filealloc[b'upload']:
631 ui.write(_(b'uploading %s\n') % bytes(fctx))
632 if not fphid:
633 uploadparams = {
634 b'name': fname,
635 b'data_base64': base64.b64encode(fctx.data()),
636 }
637 fphid = callconduit(ui, b'file.upload', uploadparams)
638 else:
639 uploadchunks(fctx, fphid)
640 else:
641 ui.debug(b'server already has %s\n' % bytes(fctx))
642
643 if not fphid:
644 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
645
646 return fphid
647
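# A minimal sketch of how a caller might drive uploadfile(); the filename is
# a made-up placeholder, and nothing in this module calls the helper yet:
#
#   fctx = ctx[b'images/logo.png']    # filectx of a binary file
#   fphid = uploadfile(fctx)          # allocate, then upload whole or chunked
#
# The returned PHID identifies the uploaded file so metadata sent to
# Phabricator later can reference it.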
648
611 649 def creatediff(ctx):
612 650 """create a Differential Diff"""
613 651 repo = ctx.repo()
614 652 repophid = getrepophid(repo)
615 653 # Create a "Differential Diff" via "differential.createrawdiff" API
616 654 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
617 655 if repophid:
618 656 params[b'repositoryPHID'] = repophid
619 657 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
620 658 if not diff:
621 659 raise error.Abort(_(b'cannot create diff for %s') % ctx)
622 660 return diff
623 661
624 662
625 663 def writediffproperties(ctx, diff):
626 664 """write metadata to diff so patches could be applied losslessly"""
627 665 params = {
628 666 b'diff_id': diff[b'id'],
629 667 b'name': b'hg:meta',
630 668 b'data': templatefilters.json(
631 669 {
632 670 b'user': ctx.user(),
633 671 b'date': b'%d %d' % ctx.date(),
634 672 b'branch': ctx.branch(),
635 673 b'node': ctx.hex(),
636 674 b'parent': ctx.p1().hex(),
637 675 }
638 676 ),
639 677 }
640 678 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
641 679
642 680 params = {
643 681 b'diff_id': diff[b'id'],
644 682 b'name': b'local:commits',
645 683 b'data': templatefilters.json(
646 684 {
647 685 ctx.hex(): {
648 686 b'author': stringutil.person(ctx.user()),
649 687 b'authorEmail': stringutil.email(ctx.user()),
650 688 b'time': int(ctx.date()[0]),
651 689 b'commit': ctx.hex(),
652 690 b'parents': [ctx.p1().hex()],
653 691 b'branch': ctx.branch(),
654 692 },
655 693 }
656 694 ),
657 695 }
658 696 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
659 697
660 698
661 699 def createdifferentialrevision(
662 700 ctx,
663 701 revid=None,
664 702 parentrevphid=None,
665 703 oldnode=None,
666 704 olddiff=None,
667 705 actions=None,
668 706 comment=None,
669 707 ):
670 708 """create or update a Differential Revision
671 709
672 710 If revid is None, create a new Differential Revision, otherwise update
673 711 revid. If parentrevphid is not None, set it as a dependency.
674 712
675 713 If oldnode is not None, check if the patch content (without commit message
676 714 and metadata) has changed before creating another diff.
677 715
678 716 If actions is not None, they will be appended to the transaction.
679 717 """
680 718 repo = ctx.repo()
681 719 if oldnode:
682 720 diffopts = mdiff.diffopts(git=True, context=32767)
683 721 oldctx = repo.unfiltered()[oldnode]
684 722 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
685 723 else:
686 724 neednewdiff = True
687 725
688 726 transactions = []
689 727 if neednewdiff:
690 728 diff = creatediff(ctx)
691 729 transactions.append({b'type': b'update', b'value': diff[b'phid']})
692 730 if comment:
693 731 transactions.append({b'type': b'comment', b'value': comment})
694 732 else:
695 733 # Even if we don't need to upload a new diff because the patch content
696 734 # does not change, we might still need to update its metadata so
697 735 # pushers could know the correct node metadata.
698 736 assert olddiff
699 737 diff = olddiff
700 738 writediffproperties(ctx, diff)
701 739
702 740 # Set the parent Revision every time, so commit re-ordering is picked-up
703 741 if parentrevphid:
704 742 transactions.append(
705 743 {b'type': b'parents.set', b'value': [parentrevphid]}
706 744 )
707 745
708 746 if actions:
709 747 transactions += actions
710 748
711 749 # Parse commit message and update related fields.
712 750 desc = ctx.description()
713 751 info = callconduit(
714 752 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
715 753 )
716 754 for k, v in info[b'fields'].items():
717 755 if k in [b'title', b'summary', b'testPlan']:
718 756 transactions.append({b'type': k, b'value': v})
719 757
720 758 params = {b'transactions': transactions}
721 759 if revid is not None:
722 760 # Update an existing Differential Revision
723 761 params[b'objectIdentifier'] = revid
724 762
725 763 revision = callconduit(repo.ui, b'differential.revision.edit', params)
726 764 if not revision:
727 765 raise error.Abort(_(b'cannot create revision for %s') % ctx)
728 766
729 767 return revision, diff
730 768
731 769
732 770 def userphids(repo, names):
733 771 """convert user names to PHIDs"""
734 772 names = [name.lower() for name in names]
735 773 query = {b'constraints': {b'usernames': names}}
736 774 result = callconduit(repo.ui, b'user.search', query)
737 775 # A username not being found is not an error of the API, so check if we have
738 776 # missed some names here.
739 777 data = result[b'data']
740 778 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
741 779 unresolved = set(names) - resolved
742 780 if unresolved:
743 781 raise error.Abort(
744 782 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
745 783 )
746 784 return [entry[b'phid'] for entry in data]
747 785
748 786
749 787 @vcrcommand(
750 788 b'phabsend',
751 789 [
752 790 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
753 791 (b'', b'amend', True, _(b'update commit messages')),
754 792 (b'', b'reviewer', [], _(b'specify reviewers')),
755 793 (b'', b'blocker', [], _(b'specify blocking reviewers')),
756 794 (
757 795 b'm',
758 796 b'comment',
759 797 b'',
760 798 _(b'add a comment to Revisions with new/updated Diffs'),
761 799 ),
762 800 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
763 801 ],
764 802 _(b'REV [OPTIONS]'),
765 803 helpcategory=command.CATEGORY_IMPORT_EXPORT,
766 804 )
767 805 def phabsend(ui, repo, *revs, **opts):
768 806 """upload changesets to Phabricator
769 807
770 808 If multiple revisions are specified, they will be sent as a stack
771 809 with a linear dependency relationship, using the order specified by the
772 810 revset.
773 811
774 812 When uploading changesets for the first time, local tags will be created to
775 813 maintain the association. After the first time, phabsend will check the
776 814 obsstore and tag information to figure out whether to update an
777 815 existing Differential Revision, or create a new one.
778 816
779 817 If --amend is set, update commit messages so they have the
780 818 ``Differential Revision`` URL, and remove the related tags. This is similar
781 819 to what arcanist does, and is preferred in author-push workflows. Otherwise,
782 820 use local tags to record the ``Differential Revision`` association.
783 821
784 822 The --confirm option lets you confirm changesets before sending them. You
785 823 can also add the following to your configuration file to make it the default
786 824 behaviour::
787 825
788 826 [phabsend]
789 827 confirm = true
790 828
791 829 phabsend will check obsstore and the above association to decide whether to
792 830 update an existing Differential Revision, or create a new one.
793 831 """
794 832 opts = pycompat.byteskwargs(opts)
795 833 revs = list(revs) + opts.get(b'rev', [])
796 834 revs = scmutil.revrange(repo, revs)
797 835
798 836 if not revs:
799 837 raise error.Abort(_(b'phabsend requires at least one changeset'))
800 838 if opts.get(b'amend'):
801 839 cmdutil.checkunfinished(repo)
802 840
803 841 # {newnode: (oldnode, olddiff, olddrev)}
804 842 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
805 843
806 844 confirm = ui.configbool(b'phabsend', b'confirm')
807 845 confirm |= bool(opts.get(b'confirm'))
808 846 if confirm:
809 847 confirmed = _confirmbeforesend(repo, revs, oldmap)
810 848 if not confirmed:
811 849 raise error.Abort(_(b'phabsend cancelled'))
812 850
813 851 actions = []
814 852 reviewers = opts.get(b'reviewer', [])
815 853 blockers = opts.get(b'blocker', [])
816 854 phids = []
817 855 if reviewers:
818 856 phids.extend(userphids(repo, reviewers))
819 857 if blockers:
820 858 phids.extend(
821 859 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
822 860 )
823 861 if phids:
824 862 actions.append({b'type': b'reviewers.add', b'value': phids})
825 863
826 864 drevids = [] # [int]
827 865 diffmap = {} # {newnode: diff}
828 866
829 867 # Send patches one by one so we know their Differential Revision PHIDs and
830 868 # can provide dependency relationship
831 869 lastrevphid = None
832 870 for rev in revs:
833 871 ui.debug(b'sending rev %d\n' % rev)
834 872 ctx = repo[rev]
835 873
836 874 # Get Differential Revision ID
837 875 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
838 876 if oldnode != ctx.node() or opts.get(b'amend'):
839 877 # Create or update Differential Revision
840 878 revision, diff = createdifferentialrevision(
841 879 ctx,
842 880 revid,
843 881 lastrevphid,
844 882 oldnode,
845 883 olddiff,
846 884 actions,
847 885 opts.get(b'comment'),
848 886 )
849 887 diffmap[ctx.node()] = diff
850 888 newrevid = int(revision[b'object'][b'id'])
851 889 newrevphid = revision[b'object'][b'phid']
852 890 if revid:
853 891 action = b'updated'
854 892 else:
855 893 action = b'created'
856 894
857 895 # Create a local tag to note the association, if commit message
858 896 # does not have it already
859 897 m = _differentialrevisiondescre.search(ctx.description())
860 898 if not m or int(m.group(r'id')) != newrevid:
861 899 tagname = b'D%d' % newrevid
862 900 tags.tag(
863 901 repo,
864 902 tagname,
865 903 ctx.node(),
866 904 message=None,
867 905 user=None,
868 906 date=None,
869 907 local=True,
870 908 )
871 909 else:
872 910 # Nothing changed. But still set "newrevphid" so the next revision
873 911 # could depend on this one and "newrevid" for the summary line.
874 912 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
875 913 newrevid = revid
876 914 action = b'skipped'
877 915
878 916 actiondesc = ui.label(
879 917 {
880 918 b'created': _(b'created'),
881 919 b'skipped': _(b'skipped'),
882 920 b'updated': _(b'updated'),
883 921 }[action],
884 922 b'phabricator.action.%s' % action,
885 923 )
886 924 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
887 925 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
888 926 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
889 927 ui.write(
890 928 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
891 929 )
892 930 drevids.append(newrevid)
893 931 lastrevphid = newrevphid
894 932
895 933 # Update commit messages and remove tags
896 934 if opts.get(b'amend'):
897 935 unfi = repo.unfiltered()
898 936 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
899 937 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
900 938 wnode = unfi[b'.'].node()
901 939 mapping = {} # {oldnode: [newnode]}
902 940 for i, rev in enumerate(revs):
903 941 old = unfi[rev]
904 942 drevid = drevids[i]
905 943 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
906 944 newdesc = getdescfromdrev(drev)
907 945 # Make sure the commit message contains "Differential Revision"
908 946 if old.description() != newdesc:
909 947 if old.phase() == phases.public:
910 948 ui.warn(
911 949 _(b"warning: not updating public commit %s\n")
912 950 % scmutil.formatchangeid(old)
913 951 )
914 952 continue
915 953 parents = [
916 954 mapping.get(old.p1().node(), (old.p1(),))[0],
917 955 mapping.get(old.p2().node(), (old.p2(),))[0],
918 956 ]
919 957 new = context.metadataonlyctx(
920 958 repo,
921 959 old,
922 960 parents=parents,
923 961 text=newdesc,
924 962 user=old.user(),
925 963 date=old.date(),
926 964 extra=old.extra(),
927 965 )
928 966
929 967 newnode = new.commit()
930 968
931 969 mapping[old.node()] = [newnode]
932 970 # Update diff property
933 971 # If it fails just warn and keep going, otherwise the DREV
934 972 # associations will be lost
935 973 try:
936 974 writediffproperties(unfi[newnode], diffmap[old.node()])
937 975 except util.urlerr.urlerror:
938 976 ui.warnnoi18n(
939 977 b'Failed to update metadata for D%s\n' % drevid
940 978 )
941 979 # Remove the local tag since it's no longer necessary
942 980 tagname = b'D%d' % drevid
943 981 if tagname in repo.tags():
944 982 tags.tag(
945 983 repo,
946 984 tagname,
947 985 nullid,
948 986 message=None,
949 987 user=None,
950 988 date=None,
951 989 local=True,
952 990 )
953 991 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
954 992 if wnode in mapping:
955 993 unfi.setparents(mapping[wnode][0])
956 994
957 995
958 996 # Map from "hg:meta" keys to header understood by "hg import". The order is
959 997 # consistent with "hg export" output.
960 998 _metanamemap = util.sortdict(
961 999 [
962 1000 (b'user', b'User'),
963 1001 (b'date', b'Date'),
964 1002 (b'branch', b'Branch'),
965 1003 (b'node', b'Node ID'),
966 1004 (b'parent', b'Parent '),
967 1005 ]
968 1006 )
969 1007
970 1008
971 1009 def _confirmbeforesend(repo, revs, oldmap):
972 1010 url, token = readurltoken(repo.ui)
973 1011 ui = repo.ui
974 1012 for rev in revs:
975 1013 ctx = repo[rev]
976 1014 desc = ctx.description().splitlines()[0]
977 1015 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
978 1016 if drevid:
979 1017 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
980 1018 else:
981 1019 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
982 1020
983 1021 ui.write(
984 1022 _(b'%s - %s: %s\n')
985 1023 % (
986 1024 drevdesc,
987 1025 ui.label(bytes(ctx), b'phabricator.node'),
988 1026 ui.label(desc, b'phabricator.desc'),
989 1027 )
990 1028 )
991 1029
992 1030 if ui.promptchoice(
993 1031 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
994 1032 ):
995 1033 return False
996 1034
997 1035 return True
998 1036
999 1037
1000 1038 _knownstatusnames = {
1001 1039 b'accepted',
1002 1040 b'needsreview',
1003 1041 b'needsrevision',
1004 1042 b'closed',
1005 1043 b'abandoned',
1006 1044 }
1007 1045
1008 1046
1009 1047 def _getstatusname(drev):
1010 1048 """get normalized status name from a Differential Revision"""
1011 1049 return drev[b'statusName'].replace(b' ', b'').lower()
1012 1050
1013 1051
1014 1052 # Small language to specify differential revisions. Support symbols: (), :X,
1015 1053 # +, and -.
1016 1054
1017 1055 _elements = {
1018 1056 # token-type: binding-strength, primary, prefix, infix, suffix
1019 1057 b'(': (12, None, (b'group', 1, b')'), None, None),
1020 1058 b':': (8, None, (b'ancestors', 8), None, None),
1021 1059 b'&': (5, None, None, (b'and_', 5), None),
1022 1060 b'+': (4, None, None, (b'add', 4), None),
1023 1061 b'-': (4, None, None, (b'sub', 4), None),
1024 1062 b')': (0, None, None, None, None),
1025 1063 b'symbol': (0, b'symbol', None, None, None),
1026 1064 b'end': (0, None, None, None, None),
1027 1065 }
1028 1066
1029 1067
1030 1068 def _tokenize(text):
1031 1069 view = memoryview(text) # zero-copy slice
1032 1070 special = b'():+-& '
1033 1071 pos = 0
1034 1072 length = len(text)
1035 1073 while pos < length:
1036 1074 symbol = b''.join(
1037 1075 itertools.takewhile(
1038 1076 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1039 1077 )
1040 1078 )
1041 1079 if symbol:
1042 1080 yield (b'symbol', symbol, pos)
1043 1081 pos += len(symbol)
1044 1082 else: # special char, ignore space
1045 1083 if text[pos] != b' ':
1046 1084 yield (text[pos], None, pos)
1047 1085 pos += 1
1048 1086 yield (b'end', None, pos)
1049 1087
1050 1088
1051 1089 def _parse(text):
1052 1090 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1053 1091 if pos != len(text):
1054 1092 raise error.ParseError(b'invalid token', pos)
1055 1093 return tree
1056 1094
1057 1095
1058 1096 def _parsedrev(symbol):
1059 1097 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1060 1098 if symbol.startswith(b'D') and symbol[1:].isdigit():
1061 1099 return int(symbol[1:])
1062 1100 if symbol.isdigit():
1063 1101 return int(symbol)
1064 1102
1065 1103
1066 1104 def _prefetchdrevs(tree):
1067 1105 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1068 1106 drevs = set()
1069 1107 ancestordrevs = set()
1070 1108 op = tree[0]
1071 1109 if op == b'symbol':
1072 1110 r = _parsedrev(tree[1])
1073 1111 if r:
1074 1112 drevs.add(r)
1075 1113 elif op == b'ancestors':
1076 1114 r, a = _prefetchdrevs(tree[1])
1077 1115 drevs.update(r)
1078 1116 ancestordrevs.update(r)
1079 1117 ancestordrevs.update(a)
1080 1118 else:
1081 1119 for t in tree[1:]:
1082 1120 r, a = _prefetchdrevs(t)
1083 1121 drevs.update(r)
1084 1122 ancestordrevs.update(a)
1085 1123 return drevs, ancestordrevs
1086 1124
1087 1125
1088 1126 def querydrev(repo, spec):
1089 1127 """return a list of "Differential Revision" dicts
1090 1128
1091 1129 spec is a string using a simple query language, see docstring in phabread
1092 1130 for details.
1093 1131
1094 1132 A "Differential Revision dict" looks like:
1095 1133
1096 1134 {
1097 1135 "id": "2",
1098 1136 "phid": "PHID-DREV-672qvysjcczopag46qty",
1099 1137 "title": "example",
1100 1138 "uri": "https://phab.example.com/D2",
1101 1139 "dateCreated": "1499181406",
1102 1140 "dateModified": "1499182103",
1103 1141 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1104 1142 "status": "0",
1105 1143 "statusName": "Needs Review",
1106 1144 "properties": [],
1107 1145 "branch": null,
1108 1146 "summary": "",
1109 1147 "testPlan": "",
1110 1148 "lineCount": "2",
1111 1149 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1112 1150 "diffs": [
1113 1151 "3",
1114 1152 "4",
1115 1153 ],
1116 1154 "commits": [],
1117 1155 "reviewers": [],
1118 1156 "ccs": [],
1119 1157 "hashes": [],
1120 1158 "auxiliary": {
1121 1159 "phabricator:projects": [],
1122 1160 "phabricator:depends-on": [
1123 1161 "PHID-DREV-gbapp366kutjebt7agcd"
1124 1162 ]
1125 1163 },
1126 1164 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1127 1165 "sourcePath": null
1128 1166 }
1129 1167 """
1130 1168
1131 1169 def fetch(params):
1132 1170 """params -> single drev or None"""
1133 1171 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1134 1172 if key in prefetched:
1135 1173 return prefetched[key]
1136 1174 drevs = callconduit(repo.ui, b'differential.query', params)
1137 1175 # Fill prefetched with the result
1138 1176 for drev in drevs:
1139 1177 prefetched[drev[b'phid']] = drev
1140 1178 prefetched[int(drev[b'id'])] = drev
1141 1179 if key not in prefetched:
1142 1180 raise error.Abort(
1143 1181 _(b'cannot get Differential Revision %r') % params
1144 1182 )
1145 1183 return prefetched[key]
1146 1184
1147 1185 def getstack(topdrevids):
1148 1186 """given a top, get a stack from the bottom, [id] -> [id]"""
1149 1187 visited = set()
1150 1188 result = []
1151 1189 queue = [{b'ids': [i]} for i in topdrevids]
1152 1190 while queue:
1153 1191 params = queue.pop()
1154 1192 drev = fetch(params)
1155 1193 if drev[b'id'] in visited:
1156 1194 continue
1157 1195 visited.add(drev[b'id'])
1158 1196 result.append(int(drev[b'id']))
1159 1197 auxiliary = drev.get(b'auxiliary', {})
1160 1198 depends = auxiliary.get(b'phabricator:depends-on', [])
1161 1199 for phid in depends:
1162 1200 queue.append({b'phids': [phid]})
1163 1201 result.reverse()
1164 1202 return smartset.baseset(result)
1165 1203
1166 1204 # Initialize prefetch cache
1167 1205 prefetched = {} # {id or phid: drev}
1168 1206
1169 1207 tree = _parse(spec)
1170 1208 drevs, ancestordrevs = _prefetchdrevs(tree)
1171 1209
1172 1210 # developer config: phabricator.batchsize
1173 1211 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1174 1212
1175 1213 # Prefetch Differential Revisions in batch
1176 1214 tofetch = set(drevs)
1177 1215 for r in ancestordrevs:
1178 1216 tofetch.update(range(max(1, r - batchsize), r + 1))
1179 1217 if drevs:
1180 1218 fetch({b'ids': list(tofetch)})
1181 1219 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1182 1220
1183 1221 # Walk through the tree, return smartsets
1184 1222 def walk(tree):
1185 1223 op = tree[0]
1186 1224 if op == b'symbol':
1187 1225 drev = _parsedrev(tree[1])
1188 1226 if drev:
1189 1227 return smartset.baseset([drev])
1190 1228 elif tree[1] in _knownstatusnames:
1191 1229 drevs = [
1192 1230 r
1193 1231 for r in validids
1194 1232 if _getstatusname(prefetched[r]) == tree[1]
1195 1233 ]
1196 1234 return smartset.baseset(drevs)
1197 1235 else:
1198 1236 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1199 1237 elif op in {b'and_', b'add', b'sub'}:
1200 1238 assert len(tree) == 3
1201 1239 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1202 1240 elif op == b'group':
1203 1241 return walk(tree[1])
1204 1242 elif op == b'ancestors':
1205 1243 return getstack(walk(tree[1]))
1206 1244 else:
1207 1245 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1208 1246
1209 1247 return [prefetched[r] for r in walk(tree)]
1210 1248
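# A minimal sketch using the spec language documented in phabread below
# (the revision numbers are just those from that docstring's example):
#
#   for drev in querydrev(repo, b':D6+8-(2+D4)'):
#       repo.ui.write(b'D%s: %s\n' % (drev[b'id'], drev[b'title']))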
1211 1249
1212 1250 def getdescfromdrev(drev):
1213 1251 """get description (commit message) from "Differential Revision"
1214 1252
1215 1253 This is similar to the differential.getcommitmessage API, but we only care
1216 1254 about a limited set of fields: title, summary, test plan, and URL.
1217 1255 """
1218 1256 title = drev[b'title']
1219 1257 summary = drev[b'summary'].rstrip()
1220 1258 testplan = drev[b'testPlan'].rstrip()
1221 1259 if testplan:
1222 1260 testplan = b'Test Plan:\n%s' % testplan
1223 1261 uri = b'Differential Revision: %s' % drev[b'uri']
1224 1262 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1225 1263
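# A minimal sketch of the resulting message (field values are the ones from
# the example Differential Revision shown in querydrev()'s docstring above):
#
#   example
#
#   Differential Revision: https://phab.example.com/D2
#
# with a non-empty summary and test plan inserted as their own paragraphs,
# the test plan prefixed by "Test Plan:".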
1226 1264
1227 1265 def getdiffmeta(diff):
1228 1266 """get commit metadata (date, node, user, p1) from a diff object
1229 1267
1230 1268 The metadata could be "hg:meta", sent by phabsend, like:
1231 1269
1232 1270 "properties": {
1233 1271 "hg:meta": {
1234 1272 "date": "1499571514 25200",
1235 1273 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1236 1274 "user": "Foo Bar <foo@example.com>",
1237 1275 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1238 1276 }
1239 1277 }
1240 1278
1241 1279 Or converted from "local:commits", sent by "arc", like:
1242 1280
1243 1281 "properties": {
1244 1282 "local:commits": {
1245 1283 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1246 1284 "author": "Foo Bar",
1247 1285 "time": 1499546314,
1248 1286 "branch": "default",
1249 1287 "tag": "",
1250 1288 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1251 1289 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1252 1290 "local": "1000",
1253 1291 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1254 1292 "summary": "...",
1255 1293 "message": "...",
1256 1294 "authorEmail": "foo@example.com"
1257 1295 }
1258 1296 }
1259 1297 }
1260 1298
1261 1299 Note: metadata extracted from "local:commits" will lose time zone
1262 1300 information.
1263 1301 """
1264 1302 props = diff.get(b'properties') or {}
1265 1303 meta = props.get(b'hg:meta')
1266 1304 if not meta:
1267 1305 if props.get(b'local:commits'):
1268 1306 commit = sorted(props[b'local:commits'].values())[0]
1269 1307 meta = {}
1270 1308 if b'author' in commit and b'authorEmail' in commit:
1271 1309 meta[b'user'] = b'%s <%s>' % (
1272 1310 commit[b'author'],
1273 1311 commit[b'authorEmail'],
1274 1312 )
1275 1313 if b'time' in commit:
1276 1314 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1277 1315 if b'branch' in commit:
1278 1316 meta[b'branch'] = commit[b'branch']
1279 1317 node = commit.get(b'commit', commit.get(b'rev'))
1280 1318 if node:
1281 1319 meta[b'node'] = node
1282 1320 if len(commit.get(b'parents', ())) >= 1:
1283 1321 meta[b'parent'] = commit[b'parents'][0]
1284 1322 else:
1285 1323 meta = {}
1286 1324 if b'date' not in meta and b'dateCreated' in diff:
1287 1325 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1288 1326 if b'branch' not in meta and diff.get(b'branch'):
1289 1327 meta[b'branch'] = diff[b'branch']
1290 1328 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1291 1329 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1292 1330 return meta
1293 1331
1294 1332
1295 1333 def readpatch(repo, drevs, write):
1296 1334 """generate plain-text patch readable by 'hg import'
1297 1335
1298 1336 write is usually ui.write. drevs is what "querydrev" returns, results of
1299 1337 "differential.query".
1300 1338 """
1301 1339 # Prefetch hg:meta property for all diffs
1302 1340 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1303 1341 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1304 1342
1305 1343 # Generate patch for each drev
1306 1344 for drev in drevs:
1307 1345 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1308 1346
1309 1347 diffid = max(int(v) for v in drev[b'diffs'])
1310 1348 body = callconduit(
1311 1349 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1312 1350 )
1313 1351 desc = getdescfromdrev(drev)
1314 1352 header = b'# HG changeset patch\n'
1315 1353
1316 1354 # Try to preserve metadata from hg:meta property. Write hg patch
1317 1355 # headers that can be read by the "import" command. See patchheadermap
1318 1356 # and extract in mercurial/patch.py for supported headers.
1319 1357 meta = getdiffmeta(diffs[b'%d' % diffid])
1320 1358 for k in _metanamemap.keys():
1321 1359 if k in meta:
1322 1360 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1323 1361
1324 1362 content = b'%s%s\n%s' % (header, desc, body)
1325 1363 write(content)
1326 1364
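# With the hg:meta values from getdiffmeta()'s docstring above, the generated
# content for one revision would look roughly like:
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message built by getdescfromdrev()>
#   <raw diff returned by differential.getrawdiff>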
1327 1365
1328 1366 @vcrcommand(
1329 1367 b'phabread',
1330 1368 [(b'', b'stack', False, _(b'read dependencies'))],
1331 1369 _(b'DREVSPEC [OPTIONS]'),
1332 1370 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1333 1371 )
1334 1372 def phabread(ui, repo, spec, **opts):
1335 1373 """print patches from Phabricator suitable for importing
1336 1374
1337 1375 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1338 1376 the number ``123``. It could also have common operators like ``+``, ``-``,
1339 1377 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1340 1378 select a stack.
1341 1379
1342 1380 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1343 1381 can be used to filter patches by status. For performance reasons, they
1344 1382 only represent a subset of non-status selections and cannot be used alone.
1345 1383
1346 1384 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1347 1385 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1348 1386 stack up to D9.
1349 1387
1350 1388 If --stack is given, follow the dependency information and read all patches.
1351 1389 It is equivalent to the ``:`` operator.
1352 1390 """
1353 1391 opts = pycompat.byteskwargs(opts)
1354 1392 if opts.get(b'stack'):
1355 1393 spec = b':(%s)' % spec
1356 1394 drevs = querydrev(repo, spec)
1357 1395 readpatch(repo, drevs, ui.write)
1358 1396
1359 1397
1360 1398 @vcrcommand(
1361 1399 b'phabupdate',
1362 1400 [
1363 1401 (b'', b'accept', False, _(b'accept revisions')),
1364 1402 (b'', b'reject', False, _(b'reject revisions')),
1365 1403 (b'', b'abandon', False, _(b'abandon revisions')),
1366 1404 (b'', b'reclaim', False, _(b'reclaim revisions')),
1367 1405 (b'm', b'comment', b'', _(b'comment on the last revision')),
1368 1406 ],
1369 1407 _(b'DREVSPEC [OPTIONS]'),
1370 1408 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1371 1409 )
1372 1410 def phabupdate(ui, repo, spec, **opts):
1373 1411 """update Differential Revision in batch
1374 1412
1375 1413 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1376 1414 """
1377 1415 opts = pycompat.byteskwargs(opts)
1378 1416 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1379 1417 if len(flags) > 1:
1380 1418 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1381 1419
1382 1420 actions = []
1383 1421 for f in flags:
1384 1422 actions.append({b'type': f, b'value': b'true'})
1385 1423
1386 1424 drevs = querydrev(repo, spec)
1387 1425 for i, drev in enumerate(drevs):
1388 1426 if i + 1 == len(drevs) and opts.get(b'comment'):
1389 1427 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1390 1428 if actions:
1391 1429 params = {
1392 1430 b'objectIdentifier': drev[b'phid'],
1393 1431 b'transactions': actions,
1394 1432 }
1395 1433 callconduit(ui, b'differential.revision.edit', params)
1396 1434
1397 1435
1398 1436 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1399 1437 def template_review(context, mapping):
1400 1438 """:phabreview: Object describing the review for this changeset.
1401 1439 Has attributes `url` and `id`.
1402 1440 """
1403 1441 ctx = context.resource(mapping, b'ctx')
1404 1442 m = _differentialrevisiondescre.search(ctx.description())
1405 1443 if m:
1406 1444 return templateutil.hybriddict(
1407 1445 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1408 1446 )
1409 1447 else:
1410 1448 tags = ctx.repo().nodetags(ctx.node())
1411 1449 for t in tags:
1412 1450 if _differentialrevisiontagre.match(t):
1413 1451 url = ctx.repo().ui.config(b'phabricator', b'url')
1414 1452 if not url.endswith(b'/'):
1415 1453 url += b'/'
1416 1454 url += t
1417 1455
1418 1456 return templateutil.hybriddict({b'url': url, b'id': t,})
1419 1457 return None