py3: byte-stringify literals in contrib/phabricator.py as example...
Yuya Nishihara
r38411:81a4be70 default
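The diff below applies one mechanical pattern across contrib/phabricator.py: string literals handed to Mercurial internals (command and config registration, ui output, Conduit parameter dicts) gain an explicit b'' prefix, because Mercurial operates on bytes under Python 3 while plain '...' literals become unicode there. A minimal illustrative sketch of the pattern, not taken from the commit itself::

    # On Python 2, 'phabricator' and b'phabricator' are the same str type;
    # on Python 3 they are distinct, and Mercurial's APIs expect bytes.
    text_literal = 'phabricator'    # unicode str on Python 3
    bytes_literal = b'phabricator'  # bytes on Python 3, str on Python 2

    # %-formatting has to stay in bytes as well (supported for bytes since 3.5):
    key = b'callsign'
    line = b'config key: %s\n' % key
    print(type(text_literal), type(bytes_literal), line)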
@@ -1,979 +1,982 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 15 changesets from being sent. The requirement can be disabled by changing the
16 16 ``differential.require-test-plan-field`` config on the server side.
17 17
18 18 Config::
19 19
20 20 [phabricator]
21 21 # Phabricator URL
22 22 url = https://phab.example.com/
23 23
24 24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 25 # callsign is "FOO".
26 26 callsign = FOO
27 27
28 28 # curl command to use. If not set (default), use builtin HTTP library to
29 29 # communicate. If set, use the specified curl command. This could be useful
30 30 # if you need to specify advanced options that are not easily supported by
31 31 # the internal library.
32 32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33 33
34 34 [auth]
35 35 example.schemes = https
36 36 example.prefix = phab.example.com
37 37
38 38 # API token. Get it from https://$HOST/conduit/login/
39 39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import itertools
45 45 import json
46 46 import operator
47 47 import re
48 48
49 49 from mercurial.node import bin, nullid
50 50 from mercurial.i18n import _
51 51 from mercurial import (
52 52 cmdutil,
53 53 context,
54 54 encoding,
55 55 error,
56 56 httpconnection as httpconnectionmod,
57 57 mdiff,
58 58 obsutil,
59 59 parser,
60 60 patch,
61 61 registrar,
62 62 scmutil,
63 63 smartset,
64 64 tags,
65 65 url as urlmod,
66 66 util,
67 67 )
68 68 from mercurial.utils import (
69 69 procutil,
70 70 stringutil,
71 71 )
72 72
73 73 cmdtable = {}
74 74 command = registrar.command(cmdtable)
75 75
76 76 configtable = {}
77 77 configitem = registrar.configitem(configtable)
78 78
79 79 # developer config: phabricator.batchsize
80 configitem('phabricator', 'batchsize',
80 configitem(b'phabricator', b'batchsize',
81 81 default=12,
82 82 )
83 configitem('phabricator', 'callsign',
83 configitem(b'phabricator', b'callsign',
84 84 default=None,
85 85 )
86 configitem('phabricator', 'curlcmd',
86 configitem(b'phabricator', b'curlcmd',
87 87 default=None,
88 88 )
89 89 # developer config: phabricator.repophid
90 configitem('phabricator', 'repophid',
90 configitem(b'phabricator', b'repophid',
91 91 default=None,
92 92 )
93 configitem('phabricator', 'url',
93 configitem(b'phabricator', b'url',
94 94 default=None,
95 95 )
96 configitem('phabsend', 'confirm',
96 configitem(b'phabsend', b'confirm',
97 97 default=False,
98 98 )
99 99
100 100 colortable = {
101 'phabricator.action.created': 'green',
102 'phabricator.action.skipped': 'magenta',
103 'phabricator.action.updated': 'magenta',
104 'phabricator.desc': '',
105 'phabricator.drev': 'bold',
106 'phabricator.node': '',
101 b'phabricator.action.created': b'green',
102 b'phabricator.action.skipped': b'magenta',
103 b'phabricator.action.updated': b'magenta',
104 b'phabricator.desc': b'',
105 b'phabricator.drev': b'bold',
106 b'phabricator.node': b'',
107 107 }
108 108
109 109 def urlencodenested(params):
110 110 """like urlencode, but works with nested parameters.
111 111
112 112 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
113 113 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
114 114 urlencode. Note: the encoding is consistent with PHP's http_build_query.
115 115 """
116 116 flatparams = util.sortdict()
117 117 def process(prefix, obj):
118 118 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
119 119 if items is None:
120 120 flatparams[prefix] = obj
121 121 else:
122 122 for k, v in items(obj):
123 123 if prefix:
124 process('%s[%s]' % (prefix, k), v)
124 process(b'%s[%s]' % (prefix, k), v)
125 125 else:
126 126 process(k, v)
127 process('', params)
127 process(b'', params)
128 128 return util.urlreq.urlencode(flatparams)
129 129
130 130 printed_token_warning = False
131 131
132 132 def readlegacytoken(repo, url):
133 133 """Transitional support for old phabricator tokens.
134 134
135 135 Remove before the 4.7 release.
136 136 """
137 137 groups = {}
138 for key, val in repo.ui.configitems('phabricator.auth'):
139 if '.' not in key:
140 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
138 for key, val in repo.ui.configitems(b'phabricator.auth'):
139 if b'.' not in key:
140 repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
141 141 % key)
142 142 continue
143 group, setting = key.rsplit('.', 1)
143 group, setting = key.rsplit(b'.', 1)
144 144 groups.setdefault(group, {})[setting] = val
145 145
146 146 token = None
147 147 for group, auth in groups.iteritems():
148 if url != auth.get('url'):
148 if url != auth.get(b'url'):
149 149 continue
150 token = auth.get('token')
150 token = auth.get(b'token')
151 151 if token:
152 152 break
153 153
154 154 global printed_token_warning
155 155
156 156 if token and not printed_token_warning:
157 157 printed_token_warning = True
158 repo.ui.warn(_('phabricator.auth.token is deprecated - please '
159 'migrate to auth.phabtoken.\n'))
158 repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
159 b'migrate to auth.phabtoken.\n'))
160 160 return token
161 161
162 162 def readurltoken(repo):
163 163 """return conduit url, token and make sure they exist
164 164
165 165 Currently read from [auth] config section. In the future, it might
166 166 make sense to read from .arcconfig and .arcrc as well.
167 167 """
168 url = repo.ui.config('phabricator', 'url')
168 url = repo.ui.config(b'phabricator', b'url')
169 169 if not url:
170 raise error.Abort(_('config %s.%s is required')
171 % ('phabricator', 'url'))
170 raise error.Abort(_(b'config %s.%s is required')
171 % (b'phabricator', b'url'))
172 172
173 173 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
174 174 token = None
175 175
176 176 if res:
177 177 group, auth = res
178 178
179 repo.ui.debug("using auth.%s.* for authentication\n" % group)
179 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
180 180
181 token = auth.get('phabtoken')
181 token = auth.get(b'phabtoken')
182 182
183 183 if not token:
184 184 token = readlegacytoken(repo, url)
185 185 if not token:
186 raise error.Abort(_('Can\'t find conduit token associated to %s')
186 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
187 187 % (url,))
188 188
189 189 return url, token
190 190
191 191 def callconduit(repo, name, params):
192 192 """call Conduit API, params is a dict. return json.loads result, or None"""
193 193 host, token = readurltoken(repo)
194 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
195 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
194 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
195 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
196 196 params = params.copy()
197 params['api.token'] = token
197 params[b'api.token'] = token
198 198 data = urlencodenested(params)
199 curlcmd = repo.ui.config('phabricator', 'curlcmd')
199 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
200 200 if curlcmd:
201 sin, sout = procutil.popen2('%s -d @- %s'
201 sin, sout = procutil.popen2(b'%s -d @- %s'
202 202 % (curlcmd, procutil.shellquote(url)))
203 203 sin.write(data)
204 204 sin.close()
205 205 body = sout.read()
206 206 else:
207 207 urlopener = urlmod.opener(repo.ui, authinfo)
208 208 request = util.urlreq.request(url, data=data)
209 209 body = urlopener.open(request).read()
210 repo.ui.debug('Conduit Response: %s\n' % body)
210 repo.ui.debug(b'Conduit Response: %s\n' % body)
211 211 parsed = json.loads(body)
212 212 if parsed.get(r'error_code'):
213 msg = (_('Conduit Error (%s): %s')
213 msg = (_(b'Conduit Error (%s): %s')
214 214 % (parsed[r'error_code'], parsed[r'error_info']))
215 215 raise error.Abort(msg)
216 216 return parsed[r'result']
217 217
218 @command('debugcallconduit', [], _('METHOD'))
218 @command(b'debugcallconduit', [], _(b'METHOD'))
219 219 def debugcallconduit(ui, repo, name):
220 220 """call Conduit API
221 221
222 222 Call parameters are read from stdin as a JSON blob. Result will be written
223 223 to stdout as a JSON blob.
224 224 """
225 225 params = json.loads(ui.fin.read())
226 226 result = callconduit(repo, name, params)
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
228 ui.write('%s\n' % s)
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
228 ui.write(b'%s\n' % s)
229 229
230 230 def getrepophid(repo):
231 231 """given callsign, return repository PHID or None"""
232 232 # developer config: phabricator.repophid
233 repophid = repo.ui.config('phabricator', 'repophid')
233 repophid = repo.ui.config(b'phabricator', b'repophid')
234 234 if repophid:
235 235 return repophid
236 callsign = repo.ui.config('phabricator', 'callsign')
236 callsign = repo.ui.config(b'phabricator', b'callsign')
237 237 if not callsign:
238 238 return None
239 query = callconduit(repo, 'diffusion.repository.search',
240 {'constraints': {'callsigns': [callsign]}})
239 query = callconduit(repo, b'diffusion.repository.search',
240 {b'constraints': {b'callsigns': [callsign]}})
241 241 if len(query[r'data']) == 0:
242 242 return None
243 243 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
244 repo.ui.setconfig('phabricator', 'repophid', repophid)
244 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
245 245 return repophid
246 246
247 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
247 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
248 248 _differentialrevisiondescre = re.compile(
249 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
249 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
250 250
251 251 def getoldnodedrevmap(repo, nodelist):
252 252 """find previous nodes that has been sent to Phabricator
253 253
254 254 return {node: (oldnode, Differential diff, Differential Revision ID)}
255 255 for node in nodelist with known previously sent versions, or associated
256 256 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
257 257 be ``None``.
258 258
259 259 Examines commit messages like "Differential Revision:" to get the
260 260 association information.
261 261
262 262 If such a commit message line is not found, examines all precursors and their
263 263 tags. Tags in the form "D1234" are considered a match, and the node
264 264 with that tag and the number after "D" (e.g. 1234) will be returned.
265 265
266 266 The ``old node``, if not None, is guaranteed to be the last diff of the
267 267 corresponding Differential Revision, and to exist in the repo.
268 268 """
269 269 url, token = readurltoken(repo)
270 270 unfi = repo.unfiltered()
271 271 nodemap = unfi.changelog.nodemap
272 272
273 273 result = {} # {node: (oldnode?, lastdiff?, drev)}
274 274 toconfirm = {} # {node: (force, {precnode}, drev)}
275 275 for node in nodelist:
276 276 ctx = unfi[node]
277 277 # For tags like "D123", put them into "toconfirm" to verify later
278 278 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
279 279 for n in precnodes:
280 280 if n in nodemap:
281 281 for tag in unfi.nodetags(n):
282 282 m = _differentialrevisiontagre.match(tag)
283 283 if m:
284 284 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
285 285 continue
286 286
287 287 # Check commit message
288 288 m = _differentialrevisiondescre.search(ctx.description())
289 289 if m:
290 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
290 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
291 291
292 292 # Double-check that tags are genuine by collecting all old nodes from
293 293 # Phabricator and expecting the precursors to overlap with them.
294 294 if toconfirm:
295 295 drevs = [drev for force, precs, drev in toconfirm.values()]
296 alldiffs = callconduit(unfi, 'differential.querydiffs',
297 {'revisionIDs': drevs})
296 alldiffs = callconduit(unfi, b'differential.querydiffs',
297 {b'revisionIDs': drevs})
298 298 getnode = lambda d: bin(encoding.unitolocal(
299 getdiffmeta(d).get(r'node', ''))) or None
299 getdiffmeta(d).get(r'node', b''))) or None
300 300 for newnode, (force, precset, drev) in toconfirm.items():
301 301 diffs = [d for d in alldiffs.values()
302 302 if int(d[r'revisionID']) == drev]
303 303
304 304 # "precursors" as known by Phabricator
305 305 phprecset = set(getnode(d) for d in diffs)
306 306
307 307 # Ignore if precursors (Phabricator and local repo) do not overlap,
308 308 # and force is not set (when commit message says nothing)
309 309 if not force and not bool(phprecset & precset):
310 tagname = 'D%d' % drev
310 tagname = b'D%d' % drev
311 311 tags.tag(repo, tagname, nullid, message=None, user=None,
312 312 date=None, local=True)
313 unfi.ui.warn(_('D%s: local tag removed - does not match '
314 'Differential history\n') % drev)
313 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
314 b'Differential history\n') % drev)
315 315 continue
316 316
317 317 # Find the last node using Phabricator metadata, and make sure it
318 318 # exists in the repo
319 319 oldnode = lastdiff = None
320 320 if diffs:
321 321 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
322 322 oldnode = getnode(lastdiff)
323 323 if oldnode and oldnode not in nodemap:
324 324 oldnode = None
325 325
326 326 result[newnode] = (oldnode, lastdiff, drev)
327 327
328 328 return result
329 329
330 330 def getdiff(ctx, diffopts):
331 331 """plain-text diff without header (user, commit message, etc)"""
332 332 output = util.stringio()
333 333 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
334 334 None, opts=diffopts):
335 335 output.write(chunk)
336 336 return output.getvalue()
337 337
338 338 def creatediff(ctx):
339 339 """create a Differential Diff"""
340 340 repo = ctx.repo()
341 341 repophid = getrepophid(repo)
342 342 # Create a "Differential Diff" via "differential.createrawdiff" API
343 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
343 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
344 344 if repophid:
345 params['repositoryPHID'] = repophid
346 diff = callconduit(repo, 'differential.createrawdiff', params)
345 params[b'repositoryPHID'] = repophid
346 diff = callconduit(repo, b'differential.createrawdiff', params)
347 347 if not diff:
348 raise error.Abort(_('cannot create diff for %s') % ctx)
348 raise error.Abort(_(b'cannot create diff for %s') % ctx)
349 349 return diff
350 350
351 351 def writediffproperties(ctx, diff):
352 352 """write metadata to diff so patches could be applied losslessly"""
353 353 params = {
354 'diff_id': diff[r'id'],
355 'name': 'hg:meta',
356 'data': json.dumps({
357 'user': ctx.user(),
358 'date': '%d %d' % ctx.date(),
359 'node': ctx.hex(),
360 'parent': ctx.p1().hex(),
354 b'diff_id': diff[r'id'],
355 b'name': b'hg:meta',
356 b'data': json.dumps({
357 b'user': ctx.user(),
358 b'date': b'%d %d' % ctx.date(),
359 b'node': ctx.hex(),
360 b'parent': ctx.p1().hex(),
361 361 }),
362 362 }
363 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
363 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
364 364
365 365 params = {
366 'diff_id': diff[r'id'],
367 'name': 'local:commits',
368 'data': json.dumps({
366 b'diff_id': diff[r'id'],
367 b'name': b'local:commits',
368 b'data': json.dumps({
369 369 ctx.hex(): {
370 'author': stringutil.person(ctx.user()),
371 'authorEmail': stringutil.email(ctx.user()),
372 'time': ctx.date()[0],
370 b'author': stringutil.person(ctx.user()),
371 b'authorEmail': stringutil.email(ctx.user()),
372 b'time': ctx.date()[0],
373 373 },
374 374 }),
375 375 }
376 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
376 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
377 377
378 378 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
379 379 olddiff=None, actions=None):
380 380 """create or update a Differential Revision
381 381
382 382 If revid is None, create a new Differential Revision, otherwise update
383 383 revid. If parentrevid is not None, set it as a dependency.
384 384
385 385 If oldnode is not None, check if the patch content (without commit message
386 386 and metadata) has changed before creating another diff.
387 387
388 388 If actions is not None, they will be appended to the transaction.
389 389 """
390 390 repo = ctx.repo()
391 391 if oldnode:
392 392 diffopts = mdiff.diffopts(git=True, context=32767)
393 393 oldctx = repo.unfiltered()[oldnode]
394 394 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
395 395 else:
396 396 neednewdiff = True
397 397
398 398 transactions = []
399 399 if neednewdiff:
400 400 diff = creatediff(ctx)
401 transactions.append({'type': 'update', 'value': diff[r'phid']})
401 transactions.append({b'type': b'update', b'value': diff[r'phid']})
402 402 else:
403 403 # Even if we don't need to upload a new diff because the patch content
404 404 # does not change, we might still need to update its metadata so
405 405 # pushers know the correct node metadata.
406 406 assert olddiff
407 407 diff = olddiff
408 408 writediffproperties(ctx, diff)
409 409
410 410 # Use a temporary summary to set the dependency. There might be better ways,
411 411 # but I cannot find them for now. Do not do that if we are updating an
412 412 # existing revision (revid is not None), since that introduces visible
413 413 # churn (someone edited "Summary" twice) on the web page.
414 414 if parentrevid and revid is None:
415 summary = 'Depends on D%s' % parentrevid
416 transactions += [{'type': 'summary', 'value': summary},
417 {'type': 'summary', 'value': ' '}]
415 summary = b'Depends on D%s' % parentrevid
416 transactions += [{b'type': b'summary', b'value': summary},
417 {b'type': b'summary', b'value': b' '}]
418 418
419 419 if actions:
420 420 transactions += actions
421 421
422 422 # Parse commit message and update related fields.
423 423 desc = ctx.description()
424 info = callconduit(repo, 'differential.parsecommitmessage',
425 {'corpus': desc})
424 info = callconduit(repo, b'differential.parsecommitmessage',
425 {b'corpus': desc})
426 426 for k, v in info[r'fields'].items():
427 if k in ['title', 'summary', 'testPlan']:
428 transactions.append({'type': k, 'value': v})
427 if k in [b'title', b'summary', b'testPlan']:
428 transactions.append({b'type': k, b'value': v})
429 429
430 params = {'transactions': transactions}
430 params = {b'transactions': transactions}
431 431 if revid is not None:
432 432 # Update an existing Differential Revision
433 params['objectIdentifier'] = revid
433 params[b'objectIdentifier'] = revid
434 434
435 revision = callconduit(repo, 'differential.revision.edit', params)
435 revision = callconduit(repo, b'differential.revision.edit', params)
436 436 if not revision:
437 raise error.Abort(_('cannot create revision for %s') % ctx)
437 raise error.Abort(_(b'cannot create revision for %s') % ctx)
438 438
439 439 return revision, diff
440 440
441 441 def userphids(repo, names):
442 442 """convert user names to PHIDs"""
443 query = {'constraints': {'usernames': names}}
444 result = callconduit(repo, 'user.search', query)
443 query = {b'constraints': {b'usernames': names}}
444 result = callconduit(repo, b'user.search', query)
445 445 # An unknown username is not an error of the API, so check if we have missed
446 446 # some names here.
447 447 data = result[r'data']
448 448 resolved = set(entry[r'fields'][r'username'] for entry in data)
449 449 unresolved = set(names) - resolved
450 450 if unresolved:
451 raise error.Abort(_('unknown username: %s')
452 % ' '.join(sorted(unresolved)))
451 raise error.Abort(_(b'unknown username: %s')
452 % b' '.join(sorted(unresolved)))
453 453 return [entry[r'phid'] for entry in data]
454 454
455 @command('phabsend',
456 [('r', 'rev', [], _('revisions to send'), _('REV')),
457 ('', 'amend', True, _('update commit messages')),
458 ('', 'reviewer', [], _('specify reviewers')),
459 ('', 'confirm', None, _('ask for confirmation before sending'))],
460 _('REV [OPTIONS]'))
455 @command(b'phabsend',
456 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
457 (b'', b'amend', True, _(b'update commit messages')),
458 (b'', b'reviewer', [], _(b'specify reviewers')),
459 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
460 _(b'REV [OPTIONS]'))
461 461 def phabsend(ui, repo, *revs, **opts):
462 462 """upload changesets to Phabricator
463 463
464 464 If there are multiple revisions specified, they will be sent as a stack
465 465 with a linear dependency relationship using the order specified by the
466 466 revset.
467 467
468 468 When changesets are uploaded for the first time, local tags will be created to
469 469 maintain the association. After the first time, phabsend will check
470 470 obsstore and tags information so it can figure out whether to update an
471 471 existing Differential Revision, or create a new one.
472 472
473 473 If --amend is set, update commit messages so they have the
474 474 ``Differential Revision`` URL, and remove the related tags. This is similar to
475 475 what arcanist does, and is preferred in author-push workflows. Otherwise,
476 476 use local tags to record the ``Differential Revision`` association.
477 477
478 478 The --confirm option lets you confirm changesets before sending them. You
479 479 can also add the following to your configuration file to make it the default
480 480 behaviour::
481 481
482 482 [phabsend]
483 483 confirm = true
484 484
485 485 phabsend will check obsstore and the above association to decide whether to
486 486 update an existing Differential Revision, or create a new one.
487 487 """
488 revs = list(revs) + opts.get('rev', [])
488 revs = list(revs) + opts.get(b'rev', [])
489 489 revs = scmutil.revrange(repo, revs)
490 490
491 491 if not revs:
492 raise error.Abort(_('phabsend requires at least one changeset'))
493 if opts.get('amend'):
492 raise error.Abort(_(b'phabsend requires at least one changeset'))
493 if opts.get(b'amend'):
494 494 cmdutil.checkunfinished(repo)
495 495
496 496 # {newnode: (oldnode, olddiff, olddrev)}
497 497 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
498 498
499 confirm = ui.configbool('phabsend', 'confirm')
500 confirm |= bool(opts.get('confirm'))
499 confirm = ui.configbool(b'phabsend', b'confirm')
500 confirm |= bool(opts.get(b'confirm'))
501 501 if confirm:
502 502 confirmed = _confirmbeforesend(repo, revs, oldmap)
503 503 if not confirmed:
504 raise error.Abort(_('phabsend cancelled'))
504 raise error.Abort(_(b'phabsend cancelled'))
505 505
506 506 actions = []
507 reviewers = opts.get('reviewer', [])
507 reviewers = opts.get(b'reviewer', [])
508 508 if reviewers:
509 509 phids = userphids(repo, reviewers)
510 actions.append({'type': 'reviewers.add', 'value': phids})
510 actions.append({b'type': b'reviewers.add', b'value': phids})
511 511
512 512 drevids = [] # [int]
513 513 diffmap = {} # {newnode: diff}
514 514
515 515 # Send patches one by one so we know their Differential Revision IDs and
516 516 # can provide the dependency relationship
517 517 lastrevid = None
518 518 for rev in revs:
519 ui.debug('sending rev %d\n' % rev)
519 ui.debug(b'sending rev %d\n' % rev)
520 520 ctx = repo[rev]
521 521
522 522 # Get Differential Revision ID
523 523 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
524 if oldnode != ctx.node() or opts.get('amend'):
524 if oldnode != ctx.node() or opts.get(b'amend'):
525 525 # Create or update Differential Revision
526 526 revision, diff = createdifferentialrevision(
527 527 ctx, revid, lastrevid, oldnode, olddiff, actions)
528 528 diffmap[ctx.node()] = diff
529 529 newrevid = int(revision[r'object'][r'id'])
530 530 if revid:
531 action = 'updated'
531 action = b'updated'
532 532 else:
533 action = 'created'
533 action = b'created'
534 534
535 535 # Create a local tag to note the association, if the commit message
536 536 # does not have it already
537 537 m = _differentialrevisiondescre.search(ctx.description())
538 if not m or int(m.group('id')) != newrevid:
539 tagname = 'D%d' % newrevid
538 if not m or int(m.group(b'id')) != newrevid:
539 tagname = b'D%d' % newrevid
540 540 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
541 541 date=None, local=True)
542 542 else:
543 543 # Nothing changed. But still set "newrevid" so the next revision
544 544 # could depend on this one.
545 545 newrevid = revid
546 action = 'skipped'
546 action = b'skipped'
547 547
548 548 actiondesc = ui.label(
549 {'created': _('created'),
550 'skipped': _('skipped'),
551 'updated': _('updated')}[action],
552 'phabricator.action.%s' % action)
553 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
554 nodedesc = ui.label(bytes(ctx), 'phabricator.node')
555 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
556 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
549 {b'created': _(b'created'),
550 b'skipped': _(b'skipped'),
551 b'updated': _(b'updated')}[action],
552 b'phabricator.action.%s' % action)
553 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
554 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
555 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
556 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
557 557 desc))
558 558 drevids.append(newrevid)
559 559 lastrevid = newrevid
560 560
561 561 # Update commit messages and remove tags
562 if opts.get('amend'):
562 if opts.get(b'amend'):
563 563 unfi = repo.unfiltered()
564 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
565 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
566 wnode = unfi['.'].node()
564 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
565 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
566 wnode = unfi[b'.'].node()
567 567 mapping = {} # {oldnode: [newnode]}
568 568 for i, rev in enumerate(revs):
569 569 old = unfi[rev]
570 570 drevid = drevids[i]
571 571 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
572 572 newdesc = getdescfromdrev(drev)
573 573 # Make sure the commit message contains "Differential Revision"
574 574 if old.description() != newdesc:
575 575 parents = [
576 576 mapping.get(old.p1().node(), (old.p1(),))[0],
577 577 mapping.get(old.p2().node(), (old.p2(),))[0],
578 578 ]
579 579 new = context.metadataonlyctx(
580 580 repo, old, parents=parents, text=newdesc,
581 581 user=old.user(), date=old.date(), extra=old.extra())
582 582
583 overrides = {('phases', 'new-commit'): old.phase()}
584 with ui.configoverride(overrides, 'phabsend'):
583 overrides = {(b'phases', b'new-commit'): old.phase()}
584 with ui.configoverride(overrides, b'phabsend'):
585 585 newnode = new.commit()
586 586
587 587 mapping[old.node()] = [newnode]
588 588 # Update diff property
589 589 writediffproperties(unfi[newnode], diffmap[old.node()])
590 590 # Remove local tags since they are no longer necessary
591 tagname = 'D%d' % drevid
591 tagname = b'D%d' % drevid
592 592 if tagname in repo.tags():
593 593 tags.tag(repo, tagname, nullid, message=None, user=None,
594 594 date=None, local=True)
595 scmutil.cleanupnodes(repo, mapping, 'phabsend')
595 scmutil.cleanupnodes(repo, mapping, b'phabsend')
596 596 if wnode in mapping:
597 597 unfi.setparents(mapping[wnode][0])
598 598
599 599 # Map from "hg:meta" keys to headers understood by "hg import". The order is
600 600 # consistent with "hg export" output.
601 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
602 (r'node', 'Node ID'), (r'parent', 'Parent ')])
601 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
602 (r'node', b'Node ID'), (r'parent', b'Parent ')])
603 603
604 604 def _confirmbeforesend(repo, revs, oldmap):
605 605 url, token = readurltoken(repo)
606 606 ui = repo.ui
607 607 for rev in revs:
608 608 ctx = repo[rev]
609 609 desc = ctx.description().splitlines()[0]
610 610 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
611 611 if drevid:
612 drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
612 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
613 613 else:
614 drevdesc = ui.label(_('NEW'), 'phabricator.drev')
614 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
615 615
616 ui.write(_('%s - %s: %s\n') % (drevdesc,
617 ui.label(bytes(ctx), 'phabricator.node'),
618 ui.label(desc, 'phabricator.desc')))
616 ui.write(_(b'%s - %s: %s\n')
617 % (drevdesc,
618 ui.label(bytes(ctx), b'phabricator.node'),
619 ui.label(desc, b'phabricator.desc')))
619 620
620 if ui.promptchoice(_('Send the above changes to %s (yn)?'
621 '$$ &Yes $$ &No') % url):
621 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
622 b'$$ &Yes $$ &No') % url):
622 623 return False
623 624
624 625 return True
625 626
626 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
627 'abandoned'}
627 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
628 b'abandoned'}
628 629
629 630 def _getstatusname(drev):
630 631 """get normalized status name from a Differential Revision"""
631 return drev[r'statusName'].replace(' ', '').lower()
632 return drev[r'statusName'].replace(b' ', b'').lower()
632 633
633 634 # Small language to specify differential revisions. Support symbols: (), :X,
634 635 # +, and -.
635 636
636 637 _elements = {
637 638 # token-type: binding-strength, primary, prefix, infix, suffix
638 '(': (12, None, ('group', 1, ')'), None, None),
639 ':': (8, None, ('ancestors', 8), None, None),
640 '&': (5, None, None, ('and_', 5), None),
641 '+': (4, None, None, ('add', 4), None),
642 '-': (4, None, None, ('sub', 4), None),
643 ')': (0, None, None, None, None),
644 'symbol': (0, 'symbol', None, None, None),
645 'end': (0, None, None, None, None),
639 b'(': (12, None, (b'group', 1, b')'), None, None),
640 b':': (8, None, (b'ancestors', 8), None, None),
641 b'&': (5, None, None, (b'and_', 5), None),
642 b'+': (4, None, None, (b'add', 4), None),
643 b'-': (4, None, None, (b'sub', 4), None),
644 b')': (0, None, None, None, None),
645 b'symbol': (0, b'symbol', None, None, None),
646 b'end': (0, None, None, None, None),
646 647 }
647 648
648 649 def _tokenize(text):
649 650 view = memoryview(text) # zero-copy slice
650 special = '():+-& '
651 special = b'():+-& '
651 652 pos = 0
652 653 length = len(text)
653 654 while pos < length:
654 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
655 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
655 656 view[pos:]))
656 657 if symbol:
657 yield ('symbol', symbol, pos)
658 yield (b'symbol', symbol, pos)
658 659 pos += len(symbol)
659 660 else: # special char, ignore space
660 if text[pos] != ' ':
661 if text[pos] != b' ':
661 662 yield (text[pos], None, pos)
662 663 pos += 1
663 yield ('end', None, pos)
664 yield (b'end', None, pos)
664 665
665 666 def _parse(text):
666 667 tree, pos = parser.parser(_elements).parse(_tokenize(text))
667 668 if pos != len(text):
668 raise error.ParseError('invalid token', pos)
669 raise error.ParseError(b'invalid token', pos)
669 670 return tree
670 671
671 672 def _parsedrev(symbol):
672 673 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
673 if symbol.startswith('D') and symbol[1:].isdigit():
674 if symbol.startswith(b'D') and symbol[1:].isdigit():
674 675 return int(symbol[1:])
675 676 if symbol.isdigit():
676 677 return int(symbol)
677 678
678 679 def _prefetchdrevs(tree):
679 680 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
680 681 drevs = set()
681 682 ancestordrevs = set()
682 683 op = tree[0]
683 if op == 'symbol':
684 if op == b'symbol':
684 685 r = _parsedrev(tree[1])
685 686 if r:
686 687 drevs.add(r)
687 elif op == 'ancestors':
688 elif op == b'ancestors':
688 689 r, a = _prefetchdrevs(tree[1])
689 690 drevs.update(r)
690 691 ancestordrevs.update(r)
691 692 ancestordrevs.update(a)
692 693 else:
693 694 for t in tree[1:]:
694 695 r, a = _prefetchdrevs(t)
695 696 drevs.update(r)
696 697 ancestordrevs.update(a)
697 698 return drevs, ancestordrevs
698 699
699 700 def querydrev(repo, spec):
700 701 """return a list of "Differential Revision" dicts
701 702
702 703 spec is a string using a simple query language; see the docstring in phabread
703 704 for details.
704 705
705 706 A "Differential Revision dict" looks like:
706 707
707 708 {
708 709 "id": "2",
709 710 "phid": "PHID-DREV-672qvysjcczopag46qty",
710 711 "title": "example",
711 712 "uri": "https://phab.example.com/D2",
712 713 "dateCreated": "1499181406",
713 714 "dateModified": "1499182103",
714 715 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
715 716 "status": "0",
716 717 "statusName": "Needs Review",
717 718 "properties": [],
718 719 "branch": null,
719 720 "summary": "",
720 721 "testPlan": "",
721 722 "lineCount": "2",
722 723 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
723 724 "diffs": [
724 725 "3",
725 726 "4",
726 727 ],
727 728 "commits": [],
728 729 "reviewers": [],
729 730 "ccs": [],
730 731 "hashes": [],
731 732 "auxiliary": {
732 733 "phabricator:projects": [],
733 734 "phabricator:depends-on": [
734 735 "PHID-DREV-gbapp366kutjebt7agcd"
735 736 ]
736 737 },
737 738 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
738 739 "sourcePath": null
739 740 }
740 741 """
741 742 def fetch(params):
742 743 """params -> single drev or None"""
743 744 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
744 745 if key in prefetched:
745 746 return prefetched[key]
746 drevs = callconduit(repo, 'differential.query', params)
747 drevs = callconduit(repo, b'differential.query', params)
747 748 # Fill prefetched with the result
748 749 for drev in drevs:
749 750 prefetched[drev[r'phid']] = drev
750 751 prefetched[int(drev[r'id'])] = drev
751 752 if key not in prefetched:
752 raise error.Abort(_('cannot get Differential Revision %r') % params)
753 raise error.Abort(_(b'cannot get Differential Revision %r')
754 % params)
753 755 return prefetched[key]
754 756
755 757 def getstack(topdrevids):
756 758 """given a top, get a stack from the bottom, [id] -> [id]"""
757 759 visited = set()
758 760 result = []
759 761 queue = [{r'ids': [i]} for i in topdrevids]
760 762 while queue:
761 763 params = queue.pop()
762 764 drev = fetch(params)
763 765 if drev[r'id'] in visited:
764 766 continue
765 767 visited.add(drev[r'id'])
766 768 result.append(int(drev[r'id']))
767 769 auxiliary = drev.get(r'auxiliary', {})
768 770 depends = auxiliary.get(r'phabricator:depends-on', [])
769 771 for phid in depends:
770 queue.append({'phids': [phid]})
772 queue.append({b'phids': [phid]})
771 773 result.reverse()
772 774 return smartset.baseset(result)
773 775
774 776 # Initialize prefetch cache
775 777 prefetched = {} # {id or phid: drev}
776 778
777 779 tree = _parse(spec)
778 780 drevs, ancestordrevs = _prefetchdrevs(tree)
779 781
780 782 # developer config: phabricator.batchsize
781 batchsize = repo.ui.configint('phabricator', 'batchsize')
783 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
782 784
783 785 # Prefetch Differential Revisions in batch
784 786 tofetch = set(drevs)
785 787 for r in ancestordrevs:
786 788 tofetch.update(range(max(1, r - batchsize), r + 1))
787 789 if drevs:
788 790 fetch({r'ids': list(tofetch)})
789 791 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
790 792
791 793 # Walk through the tree, return smartsets
792 794 def walk(tree):
793 795 op = tree[0]
794 if op == 'symbol':
796 if op == b'symbol':
795 797 drev = _parsedrev(tree[1])
796 798 if drev:
797 799 return smartset.baseset([drev])
798 800 elif tree[1] in _knownstatusnames:
799 801 drevs = [r for r in validids
800 802 if _getstatusname(prefetched[r]) == tree[1]]
801 803 return smartset.baseset(drevs)
802 804 else:
803 raise error.Abort(_('unknown symbol: %s') % tree[1])
804 elif op in {'and_', 'add', 'sub'}:
805 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
806 elif op in {b'and_', b'add', b'sub'}:
805 807 assert len(tree) == 3
806 808 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
807 elif op == 'group':
809 elif op == b'group':
808 810 return walk(tree[1])
809 elif op == 'ancestors':
811 elif op == b'ancestors':
810 812 return getstack(walk(tree[1]))
811 813 else:
812 raise error.ProgrammingError('illegal tree: %r' % tree)
814 raise error.ProgrammingError(b'illegal tree: %r' % tree)
813 815
814 816 return [prefetched[r] for r in walk(tree)]
815 817
816 818 def getdescfromdrev(drev):
817 819 """get description (commit message) from "Differential Revision"
818 820
819 821 This is similar to the differential.getcommitmessage API, but we only care
820 822 about a limited set of fields: title, summary, test plan, and URL.
821 823 """
822 824 title = drev[r'title']
823 825 summary = drev[r'summary'].rstrip()
824 826 testplan = drev[r'testPlan'].rstrip()
825 827 if testplan:
826 testplan = 'Test Plan:\n%s' % testplan
827 uri = 'Differential Revision: %s' % drev[r'uri']
828 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
828 testplan = b'Test Plan:\n%s' % testplan
829 uri = b'Differential Revision: %s' % drev[r'uri']
830 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
829 831
830 832 def getdiffmeta(diff):
831 833 """get commit metadata (date, node, user, p1) from a diff object
832 834
833 835 The metadata could be "hg:meta", sent by phabsend, like:
834 836
835 837 "properties": {
836 838 "hg:meta": {
837 839 "date": "1499571514 25200",
838 840 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
839 841 "user": "Foo Bar <foo@example.com>",
840 842 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
841 843 }
842 844 }
843 845
844 846 Or converted from "local:commits", sent by "arc", like:
845 847
846 848 "properties": {
847 849 "local:commits": {
848 850 "98c08acae292b2faf60a279b4189beb6cff1414d": {
849 851 "author": "Foo Bar",
850 852 "time": 1499546314,
851 853 "branch": "default",
852 854 "tag": "",
853 855 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
854 856 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
855 857 "local": "1000",
856 858 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
857 859 "summary": "...",
858 860 "message": "...",
859 861 "authorEmail": "foo@example.com"
860 862 }
861 863 }
862 864 }
863 865
864 866 Note: metadata extracted from "local:commits" will lose time zone
865 867 information.
866 868 """
867 869 props = diff.get(r'properties') or {}
868 870 meta = props.get(r'hg:meta')
869 871 if not meta and props.get(r'local:commits'):
870 872 commit = sorted(props[r'local:commits'].values())[0]
871 873 meta = {
872 874 r'date': r'%d 0' % commit[r'time'],
873 875 r'node': commit[r'rev'],
874 876 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
875 877 }
876 878 if len(commit.get(r'parents', ())) >= 1:
877 879 meta[r'parent'] = commit[r'parents'][0]
878 880 return meta or {}
879 881
880 882 def readpatch(repo, drevs, write):
881 883 """generate plain-text patch readable by 'hg import'
882 884
883 885 write is usually ui.write. drevs is what "querydrev" returns, results of
884 886 "differential.query".
885 887 """
886 888 # Prefetch hg:meta property for all diffs
887 889 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
888 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
890 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
889 891
890 892 # Generate patch for each drev
891 893 for drev in drevs:
892 repo.ui.note(_('reading D%s\n') % drev[r'id'])
894 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
893 895
894 896 diffid = max(int(v) for v in drev[r'diffs'])
895 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
897 body = callconduit(repo, b'differential.getrawdiff',
898 {b'diffID': diffid})
896 899 desc = getdescfromdrev(drev)
897 header = '# HG changeset patch\n'
900 header = b'# HG changeset patch\n'
898 901
899 902 # Try to preserve metadata from hg:meta property. Write hg patch
900 903 # headers that can be read by the "import" command. See patchheadermap
901 904 # and extract in mercurial/patch.py for supported headers.
902 905 meta = getdiffmeta(diffs[str(diffid)])
903 906 for k in _metanamemap.keys():
904 907 if k in meta:
905 header += '# %s %s\n' % (_metanamemap[k], meta[k])
908 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
906 909
907 content = '%s%s\n%s' % (header, desc, body)
910 content = b'%s%s\n%s' % (header, desc, body)
908 911 write(encoding.unitolocal(content))
909 912
910 @command('phabread',
911 [('', 'stack', False, _('read dependencies'))],
912 _('DREVSPEC [OPTIONS]'))
913 @command(b'phabread',
914 [(b'', b'stack', False, _(b'read dependencies'))],
915 _(b'DREVSPEC [OPTIONS]'))
913 916 def phabread(ui, repo, spec, **opts):
914 917 """print patches from Phabricator suitable for importing
915 918
916 919 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
917 920 the number ``123``. It could also have common operators like ``+``, ``-``,
918 921 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
919 922 select a stack.
920 923
921 924 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
922 925 could be used to filter patches by status. For performance reasons, they
923 926 only represent a subset of non-status selections and cannot be used alone.
924 927
925 928 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
926 929 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
927 930 stack up to D9.
928 931
929 932 If --stack is given, follow dependency information and read all patches.
930 933 It is equivalent to the ``:`` operator.
931 934 """
932 if opts.get('stack'):
933 spec = ':(%s)' % spec
935 if opts.get(b'stack'):
936 spec = b':(%s)' % spec
934 937 drevs = querydrev(repo, spec)
935 938 readpatch(repo, drevs, ui.write)
936 939
937 @command('phabupdate',
938 [('', 'accept', False, _('accept revisions')),
939 ('', 'reject', False, _('reject revisions')),
940 ('', 'abandon', False, _('abandon revisions')),
941 ('', 'reclaim', False, _('reclaim revisions')),
942 ('m', 'comment', '', _('comment on the last revision')),
943 ], _('DREVSPEC [OPTIONS]'))
940 @command(b'phabupdate',
941 [(b'', b'accept', False, _(b'accept revisions')),
942 (b'', b'reject', False, _(b'reject revisions')),
943 (b'', b'abandon', False, _(b'abandon revisions')),
944 (b'', b'reclaim', False, _(b'reclaim revisions')),
945 (b'm', b'comment', b'', _(b'comment on the last revision')),
946 ], _(b'DREVSPEC [OPTIONS]'))
944 947 def phabupdate(ui, repo, spec, **opts):
945 948 """update Differential Revision in batch
946 949
947 950 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
948 951 """
949 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
952 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
950 953 if len(flags) > 1:
951 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
954 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
952 955
953 956 actions = []
954 957 for f in flags:
955 actions.append({'type': f, 'value': 'true'})
958 actions.append({b'type': f, b'value': b'true'})
956 959
957 960 drevs = querydrev(repo, spec)
958 961 for i, drev in enumerate(drevs):
959 if i + 1 == len(drevs) and opts.get('comment'):
960 actions.append({'type': 'comment', 'value': opts['comment']})
962 if i + 1 == len(drevs) and opts.get(b'comment'):
963 actions.append({b'type': b'comment', b'value': opts[b'comment']})
961 964 if actions:
962 params = {'objectIdentifier': drev[r'phid'],
963 'transactions': actions}
964 callconduit(repo, 'differential.revision.edit', params)
965 params = {b'objectIdentifier': drev[r'phid'],
966 b'transactions': actions}
967 callconduit(repo, b'differential.revision.edit', params)
965 968
966 969 templatekeyword = registrar.templatekeyword()
967 970
968 @templatekeyword('phabreview', requires={'ctx'})
971 @templatekeyword(b'phabreview', requires={b'ctx'})
969 972 def template_review(context, mapping):
970 973 """:phabreview: Object describing the review for this changeset.
971 974 Has attributes `url` and `id`.
972 975 """
973 ctx = context.resource(mapping, 'ctx')
976 ctx = context.resource(mapping, b'ctx')
974 977 m = _differentialrevisiondescre.search(ctx.description())
975 978 if m:
976 979 return {
977 'url': m.group('url'),
978 'id': "D{}".format(m.group('id')),
980 b'url': m.group(b'url'),
981 b'id': b"D{}".format(m.group(b'id')),
979 982 }
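
As a closing note, the urlencodenested() helper near the top of the file flattens nested Conduit parameters the way PHP's http_build_query does, as described in its docstring. A rough standalone re-implementation of that flattening, for illustration only (the real code keeps insertion order with util.sortdict and then urlencodes the result)::

    def flatten(obj, prefix=''):
        # Recursively flatten nested lists/dicts into a single-level dict.
        flat = {}
        if isinstance(obj, dict):
            items = obj.items()
        elif isinstance(obj, list):
            items = enumerate(obj)
        else:
            flat[prefix] = obj
            return flat
        for k, v in items:
            key = '%s[%s]' % (prefix, k) if prefix else k
            flat.update(flatten(v, key))
        return flat

    # Matches the docstring's example:
    # {'a': ['b', 'c'], 'd': {'e': 'f'}} -> {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}
    print(flatten({'a': ['b', 'c'], 'd': {'e': 'f'}}))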
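Similarly, readpatch() assembles "hg import"-style headers from a diff's hg:meta property via _metanamemap. A small worked example using the sample metadata shown in the getdiffmeta() docstring (illustrative only; the extension itself does this with byte strings)::

    meta = {
        'date': '1499571514 25200',
        'node': '98c08acae292b2faf60a279b4189beb6cff1414d',
        'user': 'Foo Bar <foo@example.com>',
        'parent': '6d0abad76b30e4724a37ab8721d630394070fe16',
    }
    # Same key -> header-name mapping (and order) as _metanamemap above.
    metanamemap = [('user', 'User'), ('date', 'Date'),
                   ('node', 'Node ID'), ('parent', 'Parent ')]

    header = '# HG changeset patch\n'
    for key, name in metanamemap:
        if key in meta:
            header += '# %s %s\n' % (name, meta[key])
    print(header)
    # # HG changeset patch
    # # User Foo Bar <foo@example.com>
    # # Date 1499571514 25200
    # # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # # Parent  6d0abad76b30e4724a37ab8721d630394070fe16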