##// END OF EJS Templates
phabricator: add the uploadchunks function...
Ian Moody -
r43842:45307960 default
parent child Browse files
Show More
@@ -1,1390 +1,1419 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 15 changeset from being sent. The requirement could be disabled by changing
16 16 ``differential.require-test-plan-field`` config server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that is not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 import base64
44 45 import contextlib
45 46 import itertools
46 47 import json
47 48 import operator
48 49 import re
49 50
50 51 from mercurial.node import bin, nullid
51 52 from mercurial.i18n import _
52 53 from mercurial.pycompat import getattr
53 54 from mercurial.thirdparty import attr
54 55 from mercurial import (
55 56 cmdutil,
56 57 context,
57 58 encoding,
58 59 error,
59 60 exthelper,
60 61 httpconnection as httpconnectionmod,
61 62 match,
62 63 mdiff,
63 64 obsutil,
64 65 parser,
65 66 patch,
66 67 phases,
67 68 pycompat,
68 69 scmutil,
69 70 smartset,
70 71 tags,
71 72 templatefilters,
72 73 templateutil,
73 74 url as urlmod,
74 75 util,
75 76 )
76 77 from mercurial.utils import (
77 78 procutil,
78 79 stringutil,
79 80 )
80 81
81 82 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
82 83 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
83 84 # be specifying the version(s) of Mercurial they are tested with, or
84 85 # leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration tables so Mercurial's extension
# loader finds the commands/config/template keywords defined below.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Labels used with ui.label() when writing command output below.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended by vcrcommand() to every wrapped command, letting the
# test suite record/replay Conduit HTTP traffic instead of hitting a server.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
136 137
137 138
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Decorator like ``@command``, with VCR-based HTTP record/replay support.

    Adds the ``--test-vcr`` flag (``_VCR_FLAGS``) to every wrapped command.
    When that flag is given, HTTP requests are recorded to (or replayed from)
    the named cassette file; otherwise the command runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Match recorded requests by URI, method and the *set* of form
        # parameters, so that parameter ordering differences between runs
        # do not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Scrub real API tokens before they are written into the cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Don't record server cookies; they may carry session secrets.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # The vcr flag is consumed here and never reaches the command.
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does not work with demandimport's lazy modules.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch urlmod's connection classes so traffic made
                        # through Mercurial's opener is intercepted too.
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/debug output.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
202 203
203 204
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # PHP form encoding spells booleans out as words.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Dispatch on the exact type (no subclasses), matching PHP semantics:
        # lists get numeric indices, dicts keep their keys, anything else is
        # treated as a leaf value.
        vtype = type(value)
        if vtype is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif vtype is dict:
            pairs = list(value.items())
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), item)
            else:
                flatten(key, item)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
229 230
230 231
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Look up the matching [auth] group for this URL; the token lives in
    # its "phabtoken" sub-key.
    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
259 260
260 261
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``b'differential.query'``); the
    request is sent either via the configured curl command or the builtin
    HTTP library. Raises error.Abort if Conduit reports an error.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy so the caller's dict is not mutated when the token is injected.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to curl, feeding the form-encoded body on stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # json.loads yields unicode strings; convert everything back to local
    # bytes so the rest of the extension can work with bytes uniformly.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
297 298
298 299
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    # Stable key order and fixed separators keep the output diffable in tests.
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
322 323
323 324
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None
    repophid = matches[0][b'phid']
    # Remember the answer in-process so later calls skip the API round-trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
343 344
344 345
# Local tag names like "D123" that associate a node with a Differential
# Revision number.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# The "Differential Revision: <url>" line embedded in commit messages; the
# "id" group captures the numeric Revision ID at the end of the URL.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
349 350
350 351
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this only continues the inner tag
                        # loop; presumably the intent was to stop scanning
                        # once a match is found — confirm upstream.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: trust the commit message even if Phabricator's
            # precursor set does not overlap with the local one.
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the node recorded in a diff's "hg:meta" property, or None.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Tagging nullid removes the stale local tag.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
441 442
442 443
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the text chunk matters here.
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
451 452
452 453
class DiffChangeType(object):
    """Integer codes for the kind of change a file underwent in a
    Differential diff (used as ``phabchange.type``).
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
462 463
463 464
class DiffFileType(object):
    """Integer codes for a file's content type in a Differential diff
    (used as ``phabchange.fileType``).
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
468 469
469 470
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    NOTE(review): the 'camelcase-required' markers suggest these attribute
    names are serialized verbatim for Conduit, so they must keep their
    camelCase spelling — confirm against the Conduit API.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
483 484
484 485
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:*`` metadata key under the ``old:*`` name."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the pre-change unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the post-change unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(hunk)
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
524 525
525 526
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps file path -> phabchange; see addchange() below.
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange for one file, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = change
550 551
551 552
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``ctx`` and its first parent and appends one
    phabhunk per hunk to ``pchange``.
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Very large context window so hunks carry (nearly) the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # Drop the "@@ ... @@" line; the offsets are passed separately.
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        # Count added/removed lines for the per-change stats ([+++--] bit).
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
580 581
581 582
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fctx`` is the file context whose content is uploaded; ``fphid`` is the
    PHID of the file allocated on the Phabricator side. Chunks already marked
    complete (e.g. after a resumed upload) are skipped.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Read the file content once instead of re-reading it for every chunk;
    # these files are by definition large (>8MiB).
    data = fctx.data()
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                # Conduit transports the chunk as base64 text.
                b'data': base64.b64encode(data[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
609
610
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API.
    # The huge context makes the raw diff carry (nearly) full file contents.
    diffopts = mdiff.diffopts(git=True, context=32767)
    params = {b'diff': getdiff(ctx, diffopts)}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
594 623
595 624
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    ui = ctx.repo().ui
    diffid = diff[b'id']

    # "hg:meta": the changeset header fields needed to reconstruct the
    # commit exactly on the other side.
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'hg:meta',
            b'data': templatefilters.json(hgmeta),
        },
    )

    # "local:commits": per-commit metadata keyed by the node's hex hash.
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    callconduit(
        ui,
        b'differential.setdiffproperty',
        {
            b'diff_id': diffid,
            b'name': b'local:commits',
            b'data': templatefilters.json(localcommits),
        },
    )
630 659
631 660
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns ``(revision, diff)`` — the edited Revision and the diff that now
    backs it.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare patch text only; commit message/metadata changes alone do
        # not warrant uploading a new diff.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
701 730
702 731
def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    found = set(entry[b'fields'][b'username'].lower() for entry in data)
    missing = set(names) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
718 747
719 748
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Reviewer/blocker options become one "reviewers.add" transaction shared
    # by every Revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "D123 - created - 1a2b3c: summary"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Use already-rewritten parents when available so the
                    # amended stack stays linear.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
928 957
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # Trailing space is intentional: "hg export" emits "# Parent <node>".
        (b'parent', b'Parent '),
    ]
)
940 969
941 970
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per changeset and prompt before sending.

    oldmap maps a node to (oldnode, olddiff, drevid) for changesets that
    already have an associated Differential Revision.  Returns True when the
    user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        drevtext = b'D%s' % drevid if drevid else _(b'NEW')
        drevdesc = ui.label(drevtext, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns 0 for the first ("Yes") choice.
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
969 998
970 999
971 1000 _knownstatusnames = {
972 1001 b'accepted',
973 1002 b'needsreview',
974 1003 b'needsrevision',
975 1004 b'closed',
976 1005 b'abandoned',
977 1006 }
978 1007
979 1008
980 1009 def _getstatusname(drev):
981 1010 """get normalized status name from a Differential Revision"""
982 1011 return drev[b'statusName'].replace(b' ', b'').lower()
983 1012
984 1013
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
#
# This table drives mercurial.parser.parser; each entry's tuple values are
# positional and must not be reordered.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
999 1028
1000 1029
def _tokenize(text):
    """Yield (token-type, token-value, position) for a DREVSPEC string.

    Symbols come out as (b'symbol', value, pos), special characters as
    (char, None, pos); spaces are dropped.  A trailing (b'end', None, pos)
    is always emitted for the parser.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice instead of indexing: on Python 3, text[pos] is an int,
            # which never equals b' ' (so spaces would leak through) and
            # would yield ints as token types, breaking the parser.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1020 1049
1021 1050
def _parse(text):
    """Parse a DREVSPEC string into an AST; abort on trailing garbage."""
    tree, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
1027 1056
1028 1057
1029 1058 def _parsedrev(symbol):
1030 1059 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1031 1060 if symbol.startswith(b'D') and symbol[1:].isdigit():
1032 1061 return int(symbol[1:])
1033 1062 if symbol.isdigit():
1034 1063 return int(symbol)
1035 1064
1036 1065
def _prefetchdrevs(tree):
    """Collect drev ids worth prefetching from a parsed DREVSPEC tree.

    Returns ({single-drev-id}, {ancestor-drev-id}); ancestor ids are those
    appearing under a ":" (ancestors) operator, whose whole stack will be
    walked later.
    """
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif op == b'ancestors':
        subsingles, subancestors = _prefetchdrevs(tree[1])
        singles |= subsingles
        # Every id named under ":" is also an ancestors target.
        ancestors |= subsingles
        ancestors |= subancestors
    else:
        for subtree in tree[1:]:
            subsingles, subancestors = _prefetchdrevs(subtree)
            singles |= subsingles
            ancestors |= subancestors
    return singles, ancestors
1057 1086
1058 1087
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # A single-key query is served from the cache when possible; batch
        # queries (multiple ids) always hit the server.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, keyed by both int id and PHID so
        # later lookups of either form are cache hits.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk along "phabricator:depends-on" edges; the reverse
        # at the end yields bottom-of-stack-first order.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestors target,
    # guess that its stack lives in the contiguous id range just below it so
    # the stack walk above mostly hits the cache.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status filters only apply to already-selected ids; they
                # cannot stand alone (validids is bounded by the spec).
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # operator.and_/add/sub map directly onto smartset set algebra.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1181 1210
1182 1211
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections are dropped so we never emit consecutive blank lines.
    return b'\n\n'.join(p for p in parts if p)
1196 1225
1197 1226
def getdiffmeta(diff):
    """Extract commit metadata (user, date, branch, node, parent) from a
    Phabricator diff object.

    Prefers the "hg:meta" property written by phabsend, e.g.:

    "properties": {
        "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
        }
    }

    Otherwise converts the "local:commits" property sent by "arc", whose
    entries carry "author", "authorEmail", "time", "branch", "commit"/"rev"
    and "parents" fields.  Metadata extracted from "local:commits" loses
    time zone information.  Anything still missing is filled in from the
    top-level diff fields.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): picks the "first" commit via sorted(); with more
            # than one entry this compares dicts, which raises TypeError on
            # Python 3 — presumably a single entry per diff; verify.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # No zone info available; pretend UTC.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
    # Fill remaining gaps from the diff object itself.  When "hg:meta" was
    # present this updates that dict in place, matching prior behavior.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1264 1293
1265 1294
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Prefetch the latest diff of every drev in one conduit call; only the
    # latest diff's hg:meta property matters for the patch headers.
    latestdiffids = sorted(
        {max(int(v) for v in drev[b'diffs']) for drev in drevs}
    )
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': latestdiffids})

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})

        # Emit hg patch headers the "import" command understands; see
        # patchheadermap and extract in mercurial/patch.py.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for key, headername in _metanamemap.items():
            if key in meta:
                headerlines.append(b'# %s %s\n' % (headername, meta[key]))

        desc = getdescfromdrev(drev)
        write(b'%s%s\n%s' % (b''.join(headerlines), desc, body))
1297 1326
1298 1327
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ":" (ancestors) operator of the spec
        # language parsed by querydrev().
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1329 1358
1330 1359
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # -m/--comment is only attached to the last selected revision.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1367 1396
1368 1397
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision: <url>" line that phabsend amends
    # into the commit message.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    else:
        # Fall back to the local "D123" tag phabsend sets before --amend,
        # reconstructing the URL from the configured Phabricator base URL.
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now