##// END OF EJS Templates
phabricator: register config settings...
Matt Harbison -
r38053:5a7cf42b default
parent child Browse files
Show More
@@ -1,951 +1,975 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration
7 """simple Phabricator integration
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import itertools
44 import itertools
45 import json
45 import json
46 import operator
46 import operator
47 import re
47 import re
48
48
49 from mercurial.node import bin, nullid
49 from mercurial.node import bin, nullid
50 from mercurial.i18n import _
50 from mercurial.i18n import _
51 from mercurial import (
51 from mercurial import (
52 cmdutil,
52 cmdutil,
53 context,
53 context,
54 encoding,
54 encoding,
55 error,
55 error,
56 httpconnection as httpconnectionmod,
56 httpconnection as httpconnectionmod,
57 mdiff,
57 mdiff,
58 obsutil,
58 obsutil,
59 parser,
59 parser,
60 patch,
60 patch,
61 registrar,
61 registrar,
62 scmutil,
62 scmutil,
63 smartset,
63 smartset,
64 tags,
64 tags,
65 url as urlmod,
65 url as urlmod,
66 util,
66 util,
67 )
67 )
68 from mercurial.utils import (
68 from mercurial.utils import (
69 procutil,
69 procutil,
70 stringutil,
70 stringutil,
71 )
71 )
72
72
cmdtable = {}
command = registrar.command(cmdtable)

# Register every config knob this extension reads so Mercurial's devel
# warnings about unregistered config items are not triggered.
configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem('phabricator', 'batchsize',
    default=12,
)
configitem('phabricator', 'callsign',
    default=None,
)
configitem('phabricator', 'curlcmd',
    default=None,
)
# developer config: phabricator.repophid
configitem('phabricator', 'repophid',
    default=None,
)
configitem('phabricator', 'url',
    default=None,
)
configitem('phabsend', 'confirm',
    default=False,
)
99
# Output labels (used via ui.write(..., label=...)) mapped to their
# default color effects.
colortable = {
    'phabricator.action.created': 'green',
    'phabricator.action.skipped': 'magenta',
    'phabricator.action.updated': 'magenta',
    'phabricator.desc': '',
    'phabricator.drev': 'bold',
    'phabricator.node': '',
}
84
108
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    # sortdict keeps insertion order, so the encoded output is deterministic.
    flatparams = util.sortdict()
    def process(prefix, obj):
        # Only lists and dicts are recursed into; any other value is a leaf
        # stored under the accumulated "a[0][b]"-style key.
        items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process('%s[%s]' % (prefix, k), v)
                else:
                    # Top level: the key itself is the prefix.
                    process(k, v)
    process('', params)
    return util.urlreq.urlencode(flatparams)
105
129
# Module-level flag so the deprecation warning in readlegacytoken() is
# emitted at most once per process.
printed_token_warning = False
107
131
def readlegacytoken(repo, url):
    """Transitional support for old phabricator tokens.

    Reads the deprecated [phabricator.auth] config section, returning the
    ``token`` of the first group whose ``url`` matches *url*, or None.

    Remove before the 4.7 release.
    """
    # Collect "group.setting = value" items into {group: {setting: value}}.
    groups = {}
    for key, val in repo.ui.configitems('phabricator.auth'):
        if '.' not in key:
            repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
                         % key)
            continue
        group, setting = key.rsplit('.', 1)
        groups.setdefault(group, {})[setting] = val

    token = None
    for group, auth in groups.iteritems():
        if url != auth.get('url'):
            continue
        token = auth.get('token')
        if token:
            break

    global printed_token_warning

    if token and not printed_token_warning:
        # Warn once per process that this config location is deprecated.
        printed_token_warning = True
        repo.ui.warn(_('phabricator.auth.token is deprecated - please '
                       'migrate to auth.phabtoken.\n'))
    return token
137
161
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises error.Abort if no URL is configured or no token can be found.
    """
    url = repo.ui.config('phabricator', 'url')
    if not url:
        raise error.Abort(_('config %s.%s is required')
                          % ('phabricator', 'url'))

    # Match the URL against the [auth] groups to locate credentials.
    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug("using auth.%s.* for authentication\n" % group)

        token = auth.get('phabtoken')

    if not token:
        # Fall back to the deprecated [phabricator.auth] section.
        token = readlegacytoken(repo, url)
        if not token:
            raise error.Abort(_('Can\'t find conduit token associated to %s')
                              % (url,))

    return url, token
166
190
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method name (e.g. 'differential.querydiffs').
    Raises error.Abort when the server reports an error_code.
    """
    host, token = readurltoken(repo)
    url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
    repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
    # Copy before mutating so the caller's dict is left untouched.
    params = params.copy()
    params['api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config('phabricator', 'curlcmd')
    if curlcmd:
        # Pipe the urlencoded form data to the user-configured curl command
        # via stdin ("-d @-") instead of using the builtin HTTP library.
        sin, sout = procutil.popen2('%s -d @- %s'
                                    % (curlcmd, procutil.shellquote(url)))
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        body = urlopener.open(request).read()
    repo.ui.debug('Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    if parsed.get(r'error_code'):
        msg = (_('Conduit Error (%s): %s')
               % (parsed[r'error_code'], parsed[r'error_info']))
        raise error.Abort(msg)
    return parsed[r'result']
193
217
@command('debugcallconduit', [], _('METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    # Stable key order and fixed separators keep the output deterministic
    # and diff-friendly (useful for tests).
    s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
    ui.write('%s\n' % s)
205
229
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config('phabricator', 'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config('phabricator', 'callsign')
    if not callsign:
        return None
    query = callconduit(repo, 'diffusion.repository.search',
                        {'constraints': {'callsigns': [callsign]}})
    if len(query[r'data']) == 0:
        return None
    repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
    # Cache the looked-up PHID in the ui config so later calls in this
    # process short-circuit on the first check above.
    repo.ui.setconfig('phabricator', 'repophid', repophid)
    return repophid
222
246
223 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
247 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
224 _differentialrevisiondescre = re.compile(
248 _differentialrevisiondescre = re.compile(
225 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
249 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
226
250
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    # readurltoken aborts if no Conduit URL/token is configured; call it up
    # front so we fail before doing any work.
    url, token = readurltoken(repo)
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag still needs to be confirmed
                        # against Phabricator's own records below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: an explicit trailer in the message is trusted even
            # if precursors do not overlap.
            toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, 'differential.querydiffs',
                               {'revisionIDs': drevs})
        # Extract the hg node recorded in a diff's metadata, or None.
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', ''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = 'D%d' % drev
                # Tagging nullid removes the stale local tag.
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_('D%s: local tag removed - does not match '
                               'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
305
329
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # patch.diffui yields (chunk, label) pairs; drop the ui labels and
    # glue the raw diff chunks together.
    chunks = patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(), None,
                          opts=diffopts)
    return ''.join(chunk for chunk, _label in chunks)
313
337
def creatediff(ctx):
    """create a Differential Diff

    Returns the Conduit result for the created diff, aborting if the call
    returned nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API.
    # Huge context (32767 lines) so effectively the whole file content is
    # embedded in the diff.
    params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params['repositoryPHID'] = repophid
    diff = callconduit(repo, 'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_('cannot create diff for %s') % ctx)
    return diff
326
350
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # "hg:meta" records the fields needed to reconstruct the changeset
    # exactly (author, date, node and parent hashes) on import.
    hgmeta = json.dumps({
        'user': ctx.user(),
        'date': '%d %d' % ctx.date(),
        'node': ctx.hex(),
        'parent': ctx.p1().hex(),
    })
    callconduit(ctx.repo(), 'differential.setdiffproperty', {
        'diff_id': diff[r'id'],
        'name': 'hg:meta',
        'data': hgmeta,
    })

    # "local:commits" carries per-commit authorship, keyed by node hash.
    localcommits = json.dumps({
        ctx.hex(): {
            'author': stringutil.person(ctx.user()),
            'authorEmail': stringutil.email(ctx.user()),
            'time': ctx.date()[0],
        },
    })
    callconduit(ctx.repo(), 'differential.setdiffproperty', {
        'diff_id': diff[r'id'],
        'name': 'local:commits',
        'data': localcommits,
    })
353
377
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns (revision, diff): the Conduit edit result and the diff that is
    now attached to the revision.  Raises error.Abort if the edit fails.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context patches to decide whether a new diff upload
        # is actually necessary.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({'type': 'update', 'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = 'Depends on D%s' % parentrevid
        transactions += [{'type': 'summary', 'value': summary},
                         {'type': 'summary', 'value': ' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, 'differential.parsecommitmessage',
                       {'corpus': desc})
    for k, v in info[r'fields'].items():
        if k in ['title', 'summary', 'testPlan']:
            transactions.append({'type': k, 'value': v})

    params = {'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params['objectIdentifier'] = revid

    revision = callconduit(repo, 'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_('cannot create revision for %s') % ctx)

    return revision, diff
416
440
def userphids(repo, names):
    """convert user names to PHIDs"""
    result = callconduit(repo, 'user.search',
                         {'constraints': {'usernames': names}})
    data = result[r'data']
    # username not found is not an error of the API. So check if we have
    # missed some names here.
    found = set()
    for entry in data:
        found.add(entry[r'fields'][r'username'])
    missing = set(names) - found
    if missing:
        raise error.Abort(_('unknown username: %s')
                          % ' '.join(sorted(missing)))
    return [entry[r'phid'] for entry in data]
430
454
431 @command('phabsend',
455 @command('phabsend',
432 [('r', 'rev', [], _('revisions to send'), _('REV')),
456 [('r', 'rev', [], _('revisions to send'), _('REV')),
433 ('', 'amend', True, _('update commit messages')),
457 ('', 'amend', True, _('update commit messages')),
434 ('', 'reviewer', [], _('specify reviewers')),
458 ('', 'reviewer', [], _('specify reviewers')),
435 ('', 'confirm', None, _('ask for confirmation before sending'))],
459 ('', 'confirm', None, _('ask for confirmation before sending'))],
436 _('REV [OPTIONS]'))
460 _('REV [OPTIONS]'))
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get('rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_('phabsend requires at least one changeset'))
    if opts.get('amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool('phabsend', 'confirm')
    confirm |= bool(opts.get('confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_('phabsend cancelled'))

    actions = []
    reviewers = opts.get('reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({'type': 'reviewers.add', 'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug('sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get('amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[r'object'][r'id'])
            if revid:
                action = 'updated'
            else:
                action = 'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = 'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = 'skipped'

        actiondesc = ui.label(
            {'created': _('created'),
             'skipped': _('skipped'),
             'updated': _('updated')}[action],
            'phabricator.action.%s' % action)
        drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), 'phabricator.node')
        desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
        ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                            desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get('amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, 'differential.query', {'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
            wnode = unfi['.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[r'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())
                    newnode = new.commit()
                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since it's no longer necessary
                tagname = 'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, 'phabsend')
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
570
594
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
                              (r'node', 'Node ID'), (r'parent', 'Parent ')])
575
599
def _confirmbeforesend(repo, revs, oldmap):
    """interactively confirm the list of changesets to send

    Prints one line per revision (existing D-number or NEW, node, first
    description line), then prompts. Returns False when the user answers
    "No", True otherwise.
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
        else:
            drevdesc = ui.label(_('NEW'), 'phabricator.drev')

        ui.write(_('%s - %s: %s\n') % (drevdesc,
                                       ui.label(bytes(ctx), 'phabricator.node'),
                                       ui.label(desc, 'phabricator.desc')))

    if ui.promptchoice(_('Send the above changes to %s (yn)?'
                         '$$ &Yes $$ &No') % url):
        return False

    return True
597
621
# Status names recognized by the query language, normalized by _getstatusname.
_knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
                     'abandoned'}
600
624
def _getstatusname(drev):
    """get normalized status name from a Differential Revision

    e.g. "Needs Review" -> "needsreview" (spaces removed, lower-cased).
    """
    return drev[r'statusName'].replace(' ', '').lower()
604
628
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    '(': (12, None, ('group', 1, ')'), None, None),
    ':': (8, None, ('ancestors', 8), None, None),
    '&': (5, None, None, ('and_', 5), None),
    '+': (4, None, None, ('add', 4), None),
    '-': (4, None, None, ('sub', 4), None),
    ')': (0, None, None, None, None),
    'symbol': (0, 'symbol', None, None, None),
    'end': (0, None, None, None, None),
}
619
643
def _tokenize(text):
    """yield (token-type, token-value, position) tuples for the query text

    The final token is always ('end', None, len-of-consumed-text).
    """
    view = memoryview(text) # zero-copy slice
    special = '():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # longest run of non-special characters starting at pos is a symbol
        symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
                                             view[pos:]))
        if symbol:
            yield ('symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != ' ':
                yield (text[pos], None, pos)
            pos += 1
    yield ('end', None, pos)
636
660
def _parse(text):
    """parse a DREVSPEC string into a prefix tree, raising on trailing junk"""
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError('invalid token', pos)
    return tree
642
666
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith('D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)
649
673
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == 'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == 'ancestors':
        # a drev under ':' is needed both directly and as a stack top
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
670
694
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, 'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[r'phid']] = drev
            prefetched[int(drev[r'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_('cannot get Differential Revision %r') % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{r'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[r'id'] in visited:
                continue
            visited.add(drev[r'id'])
            result.append(int(drev[r'id']))
            auxiliary = drev.get(r'auxiliary', {})
            depends = auxiliary.get(r'phabricator:depends-on', [])
            for phid in depends:
                queue.append({'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint('phabricator', 'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({r'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == 'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_('unknown symbol: %s') % tree[1])
        elif op in {'and_', 'add', 'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == 'group':
            return walk(tree[1])
        elif op == 'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError('illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
787
811
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    title = drev[r'title']
    summary = drev[r'summary'].rstrip()
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        testplan = 'Test Plan:\n%s' % testplan
    uri = 'Differential Revision: %s' % drev[r'uri']
    # empty sections are dropped entirely by filter(None, ...)
    return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
801
825
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    if not meta and props.get(r'local:commits'):
        # fall back to arc-style metadata; pick the first commit when sorted
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}
851
875
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_('reading D%s\n') % drev[r'id'])

        # only the latest diff of each revision is exported
        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = '# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += '# %s %s\n' % (_metanamemap[k], meta[k])

        content = '%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))
881
905
@command('phabread',
         [('', 'stack', False, _('read dependencies'))],
         _('DREVSPEC [OPTIONS]'))
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get('stack'):
        spec = ':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
908
932
@command('phabupdate',
         [('', 'accept', False, _('accept revisions')),
          ('', 'reject', False, _('reject revisions')),
          ('', 'abandon', False, _('abandon revisions')),
          ('', 'reclaim', False, _('reclaim revisions')),
          ('m', 'comment', '', _('comment on the last revision')),
          ], _('DREVSPEC [OPTIONS]'))
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    # the status flags are mutually exclusive
    flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_('%s cannot be used together') % ', '.join(flags))

    actions = []
    for f in flags:
        actions.append({'type': f, 'value': 'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # the comment, if any, only goes on the last revision
        if i + 1 == len(drevs) and opts.get('comment'):
            actions.append({'type': 'comment', 'value': opts['comment']})
        if actions:
            params = {'objectIdentifier': drev[r'phid'],
                      'transactions': actions}
            callconduit(repo, 'differential.revision.edit', params)
937
961
templatekeyword = registrar.templatekeyword()
939
963
@templatekeyword('phabreview', requires={'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, 'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        # returns None implicitly when no Differential Revision URL is found
        return {
            'url': m.group('url'),
            'id': "D{}".format(m.group('id')),
        }
General Comments 0
You need to be logged in to leave comments. Login now