phabricator: print deprecation warning only once...
Joerg Sonnenberger
r36835:98cbfbbe default
@@ -1,919 +1,923 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration
7 """simple Phabricator integration
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # API token. Get it from https://$HOST/conduit/login/
24 # API token. Get it from https://$HOST/conduit/login/
25 token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
25 token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
26
26
27 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
27 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # callsign is "FOO".
28 # callsign is "FOO".
29 callsign = FOO
29 callsign = FOO
30
30
31 # curl command to use. If not set (default), use builtin HTTP library to
31 # curl command to use. If not set (default), use builtin HTTP library to
32 # communicate. If set, use the specified curl command. This could be useful
32 # communicate. If set, use the specified curl command. This could be useful
33 # if you need to specify advanced options that is not easily supported by
33 # if you need to specify advanced options that is not easily supported by
34 # the internal library.
34 # the internal library.
35 curlcmd = curl --connect-timeout 2 --retry 3 --silent
35 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36
36
37 [phabricator.auth]
37 [phabricator.auth]
38 example.url = https://phab.example.com/
38 example.url = https://phab.example.com/
39 # API token. Get it from https://$HOST/conduit/login/
39 # API token. Get it from https://$HOST/conduit/login/
40 example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 mdiff,
57 mdiff,
58 obsutil,
58 obsutil,
59 parser,
59 parser,
60 patch,
60 patch,
61 registrar,
61 registrar,
62 scmutil,
62 scmutil,
63 smartset,
63 smartset,
64 tags,
64 tags,
65 url as urlmod,
65 url as urlmod,
66 util,
66 util,
67 )
67 )
68
68
69 cmdtable = {}
69 cmdtable = {}
70 command = registrar.command(cmdtable)
70 command = registrar.command(cmdtable)
71
71
72 colortable = {
72 colortable = {
73 'phabricator.action.created': 'green',
73 'phabricator.action.created': 'green',
74 'phabricator.action.skipped': 'magenta',
74 'phabricator.action.skipped': 'magenta',
75 'phabricator.action.updated': 'magenta',
75 'phabricator.action.updated': 'magenta',
76 'phabricator.desc': '',
76 'phabricator.desc': '',
77 'phabricator.drev': 'bold',
77 'phabricator.drev': 'bold',
78 'phabricator.node': '',
78 'phabricator.node': '',
79 }
79 }
80
80
81 def urlencodenested(params):
81 def urlencodenested(params):
82 """like urlencode, but works with nested parameters.
82 """like urlencode, but works with nested parameters.
83
83
84 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
84 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
85 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
85 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
86 urlencode. Note: the encoding is consistent with PHP's http_build_query.
86 urlencode. Note: the encoding is consistent with PHP's http_build_query.
87 """
87 """
88 flatparams = util.sortdict()
88 flatparams = util.sortdict()
89 def process(prefix, obj):
89 def process(prefix, obj):
90 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
90 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
91 if items is None:
91 if items is None:
92 flatparams[prefix] = obj
92 flatparams[prefix] = obj
93 else:
93 else:
94 for k, v in items(obj):
94 for k, v in items(obj):
95 if prefix:
95 if prefix:
96 process('%s[%s]' % (prefix, k), v)
96 process('%s[%s]' % (prefix, k), v)
97 else:
97 else:
98 process(k, v)
98 process(k, v)
99 process('', params)
99 process('', params)
100 return util.urlreq.urlencode(flatparams)
100 return util.urlreq.urlencode(flatparams)
101
101
102 printed_token_warning = False
103
102 def readlegacytoken(repo):
104 def readlegacytoken(repo):
103 """Transitional support for old phabricator tokens.
105 """Transitional support for old phabricator tokens.
104
106
105 Remove before the 4.6 release.
107 Remove before the 4.6 release.
106 """
108 """
109 global printed_token_warning
107 token = repo.ui.config('phabricator', 'token')
110 token = repo.ui.config('phabricator', 'token')
108 if token:
111 if token and not printed_token_warning:
112 printed_token_warning = True
109 repo.ui.warn(_('phabricator.token is deprecated - please '
113 repo.ui.warn(_('phabricator.token is deprecated - please '
110 'migrate to the phabricator.auth section.\n'))
114 'migrate to the phabricator.auth section.\n'))
111 return token
115 return token
112
116
113 def readurltoken(repo):
117 def readurltoken(repo):
114 """return conduit url, token and make sure they exist
118 """return conduit url, token and make sure they exist
115
119
116 Currently read from [phabricator] config section. In the future, it might
120 Currently read from [phabricator] config section. In the future, it might
117 make sense to read from .arcconfig and .arcrc as well.
121 make sense to read from .arcconfig and .arcrc as well.
118 """
122 """
119 url = repo.ui.config('phabricator', 'url')
123 url = repo.ui.config('phabricator', 'url')
120 if not url:
124 if not url:
121 raise error.Abort(_('config %s.%s is required')
125 raise error.Abort(_('config %s.%s is required')
122 % ('phabricator', 'url'))
126 % ('phabricator', 'url'))
123
127
124 groups = {}
128 groups = {}
125 for key, val in repo.ui.configitems('phabricator.auth'):
129 for key, val in repo.ui.configitems('phabricator.auth'):
126 if '.' not in key:
130 if '.' not in key:
127 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
131 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
128 % key)
132 % key)
129 continue
133 continue
130 group, setting = key.rsplit('.', 1)
134 group, setting = key.rsplit('.', 1)
131 groups.setdefault(group, {})[setting] = val
135 groups.setdefault(group, {})[setting] = val
132
136
133 token = None
137 token = None
134 for group, auth in groups.iteritems():
138 for group, auth in groups.iteritems():
135 if url != auth.get('url'):
139 if url != auth.get('url'):
136 continue
140 continue
137 token = auth.get('token')
141 token = auth.get('token')
138 if token:
142 if token:
139 break
143 break
140
144
141 if not token:
145 if not token:
142 token = readlegacytoken(repo)
146 token = readlegacytoken(repo)
143 if not token:
147 if not token:
144 raise error.Abort(_('Can\'t find conduit token associated to %s')
148 raise error.Abort(_('Can\'t find conduit token associated to %s')
145 % (url,))
149 % (url,))
146
150
147 return url, token
151 return url, token
148
152
149 def callconduit(repo, name, params):
153 def callconduit(repo, name, params):
150 """call Conduit API, params is a dict. return json.loads result, or None"""
154 """call Conduit API, params is a dict. return json.loads result, or None"""
151 host, token = readurltoken(repo)
155 host, token = readurltoken(repo)
152 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
156 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
153 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
157 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
154 params = params.copy()
158 params = params.copy()
155 params['api.token'] = token
159 params['api.token'] = token
156 data = urlencodenested(params)
160 data = urlencodenested(params)
157 curlcmd = repo.ui.config('phabricator', 'curlcmd')
161 curlcmd = repo.ui.config('phabricator', 'curlcmd')
158 if curlcmd:
162 if curlcmd:
159 sin, sout = util.popen2('%s -d @- %s' % (curlcmd, util.shellquote(url)))
163 sin, sout = util.popen2('%s -d @- %s' % (curlcmd, util.shellquote(url)))
160 sin.write(data)
164 sin.write(data)
161 sin.close()
165 sin.close()
162 body = sout.read()
166 body = sout.read()
163 else:
167 else:
164 urlopener = urlmod.opener(repo.ui, authinfo)
168 urlopener = urlmod.opener(repo.ui, authinfo)
165 request = util.urlreq.request(url, data=data)
169 request = util.urlreq.request(url, data=data)
166 body = urlopener.open(request).read()
170 body = urlopener.open(request).read()
167 repo.ui.debug('Conduit Response: %s\n' % body)
171 repo.ui.debug('Conduit Response: %s\n' % body)
168 parsed = json.loads(body)
172 parsed = json.loads(body)
169 if parsed.get(r'error_code'):
173 if parsed.get(r'error_code'):
170 msg = (_('Conduit Error (%s): %s')
174 msg = (_('Conduit Error (%s): %s')
171 % (parsed[r'error_code'], parsed[r'error_info']))
175 % (parsed[r'error_code'], parsed[r'error_info']))
172 raise error.Abort(msg)
176 raise error.Abort(msg)
173 return parsed[r'result']
177 return parsed[r'result']
174
178
175 @command('debugcallconduit', [], _('METHOD'))
179 @command('debugcallconduit', [], _('METHOD'))
176 def debugcallconduit(ui, repo, name):
180 def debugcallconduit(ui, repo, name):
177 """call Conduit API
181 """call Conduit API
178
182
179 Call parameters are read from stdin as a JSON blob. Result will be written
183 Call parameters are read from stdin as a JSON blob. Result will be written
180 to stdout as a JSON blob.
184 to stdout as a JSON blob.
181 """
185 """
182 params = json.loads(ui.fin.read())
186 params = json.loads(ui.fin.read())
183 result = callconduit(repo, name, params)
187 result = callconduit(repo, name, params)
184 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
188 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
185 ui.write('%s\n' % s)
189 ui.write('%s\n' % s)
186
190
187 def getrepophid(repo):
191 def getrepophid(repo):
188 """given callsign, return repository PHID or None"""
192 """given callsign, return repository PHID or None"""
189 # developer config: phabricator.repophid
193 # developer config: phabricator.repophid
190 repophid = repo.ui.config('phabricator', 'repophid')
194 repophid = repo.ui.config('phabricator', 'repophid')
191 if repophid:
195 if repophid:
192 return repophid
196 return repophid
193 callsign = repo.ui.config('phabricator', 'callsign')
197 callsign = repo.ui.config('phabricator', 'callsign')
194 if not callsign:
198 if not callsign:
195 return None
199 return None
196 query = callconduit(repo, 'diffusion.repository.search',
200 query = callconduit(repo, 'diffusion.repository.search',
197 {'constraints': {'callsigns': [callsign]}})
201 {'constraints': {'callsigns': [callsign]}})
198 if len(query[r'data']) == 0:
202 if len(query[r'data']) == 0:
199 return None
203 return None
200 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
204 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
201 repo.ui.setconfig('phabricator', 'repophid', repophid)
205 repo.ui.setconfig('phabricator', 'repophid', repophid)
202 return repophid
206 return repophid
203
207
204 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
208 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
205 _differentialrevisiondescre = re.compile(
209 _differentialrevisiondescre = re.compile(
206 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
210 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
207
211
208 def getoldnodedrevmap(repo, nodelist):
212 def getoldnodedrevmap(repo, nodelist):
209 """find previous nodes that has been sent to Phabricator
213 """find previous nodes that has been sent to Phabricator
210
214
211 return {node: (oldnode, Differential diff, Differential Revision ID)}
215 return {node: (oldnode, Differential diff, Differential Revision ID)}
212 for node in nodelist with known previous sent versions, or associated
216 for node in nodelist with known previous sent versions, or associated
213 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
217 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
214 be ``None``.
218 be ``None``.
215
219
216 Examines commit messages like "Differential Revision:" to get the
220 Examines commit messages like "Differential Revision:" to get the
217 association information.
221 association information.
218
222
219 If such commit message line is not found, examines all precursors and their
223 If such commit message line is not found, examines all precursors and their
220 tags. Tags with format like "D1234" are considered a match and the node
224 tags. Tags with format like "D1234" are considered a match and the node
221 with that tag, and the number after "D" (ex. 1234) will be returned.
225 with that tag, and the number after "D" (ex. 1234) will be returned.
222
226
223 The ``old node``, if not None, is guaranteed to be the last diff of
227 The ``old node``, if not None, is guaranteed to be the last diff of
224 corresponding Differential Revision, and exist in the repo.
228 corresponding Differential Revision, and exist in the repo.
225 """
229 """
226 url, token = readurltoken(repo)
230 url, token = readurltoken(repo)
227 unfi = repo.unfiltered()
231 unfi = repo.unfiltered()
228 nodemap = unfi.changelog.nodemap
232 nodemap = unfi.changelog.nodemap
229
233
230 result = {} # {node: (oldnode?, lastdiff?, drev)}
234 result = {} # {node: (oldnode?, lastdiff?, drev)}
231 toconfirm = {} # {node: (force, {precnode}, drev)}
235 toconfirm = {} # {node: (force, {precnode}, drev)}
232 for node in nodelist:
236 for node in nodelist:
233 ctx = unfi[node]
237 ctx = unfi[node]
234 # For tags like "D123", put them into "toconfirm" to verify later
238 # For tags like "D123", put them into "toconfirm" to verify later
235 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
239 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
236 for n in precnodes:
240 for n in precnodes:
237 if n in nodemap:
241 if n in nodemap:
238 for tag in unfi.nodetags(n):
242 for tag in unfi.nodetags(n):
239 m = _differentialrevisiontagre.match(tag)
243 m = _differentialrevisiontagre.match(tag)
240 if m:
244 if m:
241 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
245 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
242 continue
246 continue
243
247
244 # Check commit message
248 # Check commit message
245 m = _differentialrevisiondescre.search(ctx.description())
249 m = _differentialrevisiondescre.search(ctx.description())
246 if m:
250 if m:
247 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
251 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
248
252
249 # Double check if tags are genuine by collecting all old nodes from
253 # Double check if tags are genuine by collecting all old nodes from
250 # Phabricator, and expect precursors overlap with it.
254 # Phabricator, and expect precursors overlap with it.
251 if toconfirm:
255 if toconfirm:
252 drevs = [drev for force, precs, drev in toconfirm.values()]
256 drevs = [drev for force, precs, drev in toconfirm.values()]
253 alldiffs = callconduit(unfi, 'differential.querydiffs',
257 alldiffs = callconduit(unfi, 'differential.querydiffs',
254 {'revisionIDs': drevs})
258 {'revisionIDs': drevs})
255 getnode = lambda d: bin(encoding.unitolocal(
259 getnode = lambda d: bin(encoding.unitolocal(
256 getdiffmeta(d).get(r'node', ''))) or None
260 getdiffmeta(d).get(r'node', ''))) or None
257 for newnode, (force, precset, drev) in toconfirm.items():
261 for newnode, (force, precset, drev) in toconfirm.items():
258 diffs = [d for d in alldiffs.values()
262 diffs = [d for d in alldiffs.values()
259 if int(d[r'revisionID']) == drev]
263 if int(d[r'revisionID']) == drev]
260
264
261 # "precursors" as known by Phabricator
265 # "precursors" as known by Phabricator
262 phprecset = set(getnode(d) for d in diffs)
266 phprecset = set(getnode(d) for d in diffs)
263
267
264 # Ignore if precursors (Phabricator and local repo) do not overlap,
268 # Ignore if precursors (Phabricator and local repo) do not overlap,
265 # and force is not set (when commit message says nothing)
269 # and force is not set (when commit message says nothing)
266 if not force and not bool(phprecset & precset):
270 if not force and not bool(phprecset & precset):
267 tagname = 'D%d' % drev
271 tagname = 'D%d' % drev
268 tags.tag(repo, tagname, nullid, message=None, user=None,
272 tags.tag(repo, tagname, nullid, message=None, user=None,
269 date=None, local=True)
273 date=None, local=True)
270 unfi.ui.warn(_('D%s: local tag removed - does not match '
274 unfi.ui.warn(_('D%s: local tag removed - does not match '
271 'Differential history\n') % drev)
275 'Differential history\n') % drev)
272 continue
276 continue
273
277
274 # Find the last node using Phabricator metadata, and make sure it
278 # Find the last node using Phabricator metadata, and make sure it
275 # exists in the repo
279 # exists in the repo
276 oldnode = lastdiff = None
280 oldnode = lastdiff = None
277 if diffs:
281 if diffs:
278 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
282 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
279 oldnode = getnode(lastdiff)
283 oldnode = getnode(lastdiff)
280 if oldnode and oldnode not in nodemap:
284 if oldnode and oldnode not in nodemap:
281 oldnode = None
285 oldnode = None
282
286
283 result[newnode] = (oldnode, lastdiff, drev)
287 result[newnode] = (oldnode, lastdiff, drev)
284
288
285 return result
289 return result
286
290
287 def getdiff(ctx, diffopts):
291 def getdiff(ctx, diffopts):
288 """plain-text diff without header (user, commit message, etc)"""
292 """plain-text diff without header (user, commit message, etc)"""
289 output = util.stringio()
293 output = util.stringio()
290 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
294 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
291 None, opts=diffopts):
295 None, opts=diffopts):
292 output.write(chunk)
296 output.write(chunk)
293 return output.getvalue()
297 return output.getvalue()
294
298
295 def creatediff(ctx):
299 def creatediff(ctx):
296 """create a Differential Diff"""
300 """create a Differential Diff"""
297 repo = ctx.repo()
301 repo = ctx.repo()
298 repophid = getrepophid(repo)
302 repophid = getrepophid(repo)
299 # Create a "Differential Diff" via "differential.createrawdiff" API
303 # Create a "Differential Diff" via "differential.createrawdiff" API
300 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
304 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
301 if repophid:
305 if repophid:
302 params['repositoryPHID'] = repophid
306 params['repositoryPHID'] = repophid
303 diff = callconduit(repo, 'differential.createrawdiff', params)
307 diff = callconduit(repo, 'differential.createrawdiff', params)
304 if not diff:
308 if not diff:
305 raise error.Abort(_('cannot create diff for %s') % ctx)
309 raise error.Abort(_('cannot create diff for %s') % ctx)
306 return diff
310 return diff
307
311
308 def writediffproperties(ctx, diff):
312 def writediffproperties(ctx, diff):
309 """write metadata to diff so patches could be applied losslessly"""
313 """write metadata to diff so patches could be applied losslessly"""
310 params = {
314 params = {
311 'diff_id': diff[r'id'],
315 'diff_id': diff[r'id'],
312 'name': 'hg:meta',
316 'name': 'hg:meta',
313 'data': json.dumps({
317 'data': json.dumps({
314 'user': ctx.user(),
318 'user': ctx.user(),
315 'date': '%d %d' % ctx.date(),
319 'date': '%d %d' % ctx.date(),
316 'node': ctx.hex(),
320 'node': ctx.hex(),
317 'parent': ctx.p1().hex(),
321 'parent': ctx.p1().hex(),
318 }),
322 }),
319 }
323 }
320 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
324 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
321
325
322 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
326 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
323 olddiff=None, actions=None):
327 olddiff=None, actions=None):
324 """create or update a Differential Revision
328 """create or update a Differential Revision
325
329
326 If revid is None, create a new Differential Revision, otherwise update
330 If revid is None, create a new Differential Revision, otherwise update
327 revid. If parentrevid is not None, set it as a dependency.
331 revid. If parentrevid is not None, set it as a dependency.
328
332
329 If oldnode is not None, check if the patch content (without commit message
333 If oldnode is not None, check if the patch content (without commit message
330 and metadata) has changed before creating another diff.
334 and metadata) has changed before creating another diff.
331
335
332 If actions is not None, they will be appended to the transaction.
336 If actions is not None, they will be appended to the transaction.
333 """
337 """
334 repo = ctx.repo()
338 repo = ctx.repo()
335 if oldnode:
339 if oldnode:
336 diffopts = mdiff.diffopts(git=True, context=32767)
340 diffopts = mdiff.diffopts(git=True, context=32767)
337 oldctx = repo.unfiltered()[oldnode]
341 oldctx = repo.unfiltered()[oldnode]
338 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
342 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
339 else:
343 else:
340 neednewdiff = True
344 neednewdiff = True
341
345
342 transactions = []
346 transactions = []
343 if neednewdiff:
347 if neednewdiff:
344 diff = creatediff(ctx)
348 diff = creatediff(ctx)
345 transactions.append({'type': 'update', 'value': diff[r'phid']})
349 transactions.append({'type': 'update', 'value': diff[r'phid']})
346 else:
350 else:
347 # Even if we don't need to upload a new diff because the patch content
351 # Even if we don't need to upload a new diff because the patch content
348 # does not change. We might still need to update its metadata so
352 # does not change. We might still need to update its metadata so
349 # pushers could know the correct node metadata.
353 # pushers could know the correct node metadata.
350 assert olddiff
354 assert olddiff
351 diff = olddiff
355 diff = olddiff
352 writediffproperties(ctx, diff)
356 writediffproperties(ctx, diff)
353
357
354 # Use a temporary summary to set dependency. There might be better ways but
358 # Use a temporary summary to set dependency. There might be better ways but
355 # I cannot find them for now. But do not do that if we are updating an
359 # I cannot find them for now. But do not do that if we are updating an
356 # existing revision (revid is not None) since that introduces visible
360 # existing revision (revid is not None) since that introduces visible
357 # churns (someone edited "Summary" twice) on the web page.
361 # churns (someone edited "Summary" twice) on the web page.
358 if parentrevid and revid is None:
362 if parentrevid and revid is None:
359 summary = 'Depends on D%s' % parentrevid
363 summary = 'Depends on D%s' % parentrevid
360 transactions += [{'type': 'summary', 'value': summary},
364 transactions += [{'type': 'summary', 'value': summary},
361 {'type': 'summary', 'value': ' '}]
365 {'type': 'summary', 'value': ' '}]
362
366
363 if actions:
367 if actions:
364 transactions += actions
368 transactions += actions
365
369
366 # Parse commit message and update related fields.
370 # Parse commit message and update related fields.
367 desc = ctx.description()
371 desc = ctx.description()
368 info = callconduit(repo, 'differential.parsecommitmessage',
372 info = callconduit(repo, 'differential.parsecommitmessage',
369 {'corpus': desc})
373 {'corpus': desc})
370 for k, v in info[r'fields'].items():
374 for k, v in info[r'fields'].items():
371 if k in ['title', 'summary', 'testPlan']:
375 if k in ['title', 'summary', 'testPlan']:
372 transactions.append({'type': k, 'value': v})
376 transactions.append({'type': k, 'value': v})
373
377
374 params = {'transactions': transactions}
378 params = {'transactions': transactions}
375 if revid is not None:
379 if revid is not None:
376 # Update an existing Differential Revision
380 # Update an existing Differential Revision
377 params['objectIdentifier'] = revid
381 params['objectIdentifier'] = revid
378
382
379 revision = callconduit(repo, 'differential.revision.edit', params)
383 revision = callconduit(repo, 'differential.revision.edit', params)
380 if not revision:
384 if not revision:
381 raise error.Abort(_('cannot create revision for %s') % ctx)
385 raise error.Abort(_('cannot create revision for %s') % ctx)
382
386
383 return revision, diff
387 return revision, diff
384
388
385 def userphids(repo, names):
389 def userphids(repo, names):
386 """convert user names to PHIDs"""
390 """convert user names to PHIDs"""
387 query = {'constraints': {'usernames': names}}
391 query = {'constraints': {'usernames': names}}
388 result = callconduit(repo, 'user.search', query)
392 result = callconduit(repo, 'user.search', query)
389 # username not found is not an error of the API. So check if we have missed
393 # username not found is not an error of the API. So check if we have missed
390 # some names here.
394 # some names here.
391 data = result[r'data']
395 data = result[r'data']
392 resolved = set(entry[r'fields'][r'username'] for entry in data)
396 resolved = set(entry[r'fields'][r'username'] for entry in data)
393 unresolved = set(names) - resolved
397 unresolved = set(names) - resolved
394 if unresolved:
398 if unresolved:
395 raise error.Abort(_('unknown username: %s')
399 raise error.Abort(_('unknown username: %s')
396 % ' '.join(sorted(unresolved)))
400 % ' '.join(sorted(unresolved)))
397 return [entry[r'phid'] for entry in data]
401 return [entry[r'phid'] for entry in data]
398
402
399 @command('phabsend',
403 @command('phabsend',
400 [('r', 'rev', [], _('revisions to send'), _('REV')),
404 [('r', 'rev', [], _('revisions to send'), _('REV')),
401 ('', 'amend', True, _('update commit messages')),
405 ('', 'amend', True, _('update commit messages')),
402 ('', 'reviewer', [], _('specify reviewers')),
406 ('', 'reviewer', [], _('specify reviewers')),
403 ('', 'confirm', None, _('ask for confirmation before sending'))],
407 ('', 'confirm', None, _('ask for confirmation before sending'))],
404 _('REV [OPTIONS]'))
408 _('REV [OPTIONS]'))
405 def phabsend(ui, repo, *revs, **opts):
409 def phabsend(ui, repo, *revs, **opts):
406 """upload changesets to Phabricator
410 """upload changesets to Phabricator
407
411
408 If there are multiple revisions specified, they will be send as a stack
412 If there are multiple revisions specified, they will be send as a stack
409 with a linear dependencies relationship using the order specified by the
413 with a linear dependencies relationship using the order specified by the
410 revset.
414 revset.
411
415
412 For the first time uploading changesets, local tags will be created to
416 For the first time uploading changesets, local tags will be created to
413 maintain the association. After the first time, phabsend will check
417 maintain the association. After the first time, phabsend will check
414 obsstore and tags information so it can figure out whether to update an
418 obsstore and tags information so it can figure out whether to update an
415 existing Differential Revision, or create a new one.
419 existing Differential Revision, or create a new one.
416
420
417 If --amend is set, update commit messages so they have the
421 If --amend is set, update commit messages so they have the
418 ``Differential Revision`` URL, remove related tags. This is similar to what
422 ``Differential Revision`` URL, remove related tags. This is similar to what
419 arcanist will do, and is more desired in author-push workflows. Otherwise,
423 arcanist will do, and is more desired in author-push workflows. Otherwise,
420 use local tags to record the ``Differential Revision`` association.
424 use local tags to record the ``Differential Revision`` association.
421
425
422 The --confirm option lets you confirm changesets before sending them. You
426 The --confirm option lets you confirm changesets before sending them. You
423 can also add following to your configuration file to make it default
427 can also add following to your configuration file to make it default
424 behaviour::
428 behaviour::
425
429
426 [phabsend]
430 [phabsend]
427 confirm = true
431 confirm = true
428
432
429 phabsend will check obsstore and the above association to decide whether to
433 phabsend will check obsstore and the above association to decide whether to
430 update an existing Differential Revision, or create a new one.
434 update an existing Differential Revision, or create a new one.
431 """
435 """
432 revs = list(revs) + opts.get('rev', [])
436 revs = list(revs) + opts.get('rev', [])
433 revs = scmutil.revrange(repo, revs)
437 revs = scmutil.revrange(repo, revs)
434
438
435 if not revs:
439 if not revs:
436 raise error.Abort(_('phabsend requires at least one changeset'))
440 raise error.Abort(_('phabsend requires at least one changeset'))
437 if opts.get('amend'):
441 if opts.get('amend'):
438 cmdutil.checkunfinished(repo)
442 cmdutil.checkunfinished(repo)
439
443
440 # {newnode: (oldnode, olddiff, olddrev}
444 # {newnode: (oldnode, olddiff, olddrev}
441 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
445 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
442
446
443 confirm = ui.configbool('phabsend', 'confirm')
447 confirm = ui.configbool('phabsend', 'confirm')
444 confirm |= bool(opts.get('confirm'))
448 confirm |= bool(opts.get('confirm'))
445 if confirm:
449 if confirm:
446 confirmed = _confirmbeforesend(repo, revs, oldmap)
450 confirmed = _confirmbeforesend(repo, revs, oldmap)
447 if not confirmed:
451 if not confirmed:
448 raise error.Abort(_('phabsend cancelled'))
452 raise error.Abort(_('phabsend cancelled'))
449
453
450 actions = []
454 actions = []
451 reviewers = opts.get('reviewer', [])
455 reviewers = opts.get('reviewer', [])
452 if reviewers:
456 if reviewers:
453 phids = userphids(repo, reviewers)
457 phids = userphids(repo, reviewers)
454 actions.append({'type': 'reviewers.add', 'value': phids})
458 actions.append({'type': 'reviewers.add', 'value': phids})
455
459
456 drevids = [] # [int]
460 drevids = [] # [int]
457 diffmap = {} # {newnode: diff}
461 diffmap = {} # {newnode: diff}
458
462
459 # Send patches one by one so we know their Differential Revision IDs and
463 # Send patches one by one so we know their Differential Revision IDs and
460 # can provide dependency relationship
464 # can provide dependency relationship
461 lastrevid = None
465 lastrevid = None
462 for rev in revs:
466 for rev in revs:
463 ui.debug('sending rev %d\n' % rev)
467 ui.debug('sending rev %d\n' % rev)
464 ctx = repo[rev]
468 ctx = repo[rev]
465
469
466 # Get Differential Revision ID
470 # Get Differential Revision ID
467 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
471 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
468 if oldnode != ctx.node() or opts.get('amend'):
472 if oldnode != ctx.node() or opts.get('amend'):
469 # Create or update Differential Revision
473 # Create or update Differential Revision
470 revision, diff = createdifferentialrevision(
474 revision, diff = createdifferentialrevision(
471 ctx, revid, lastrevid, oldnode, olddiff, actions)
475 ctx, revid, lastrevid, oldnode, olddiff, actions)
472 diffmap[ctx.node()] = diff
476 diffmap[ctx.node()] = diff
473 newrevid = int(revision[r'object'][r'id'])
477 newrevid = int(revision[r'object'][r'id'])
474 if revid:
478 if revid:
475 action = 'updated'
479 action = 'updated'
476 else:
480 else:
477 action = 'created'
481 action = 'created'
478
482
479 # Create a local tag to note the association, if commit message
483 # Create a local tag to note the association, if commit message
480 # does not have it already
484 # does not have it already
481 m = _differentialrevisiondescre.search(ctx.description())
485 m = _differentialrevisiondescre.search(ctx.description())
482 if not m or int(m.group('id')) != newrevid:
486 if not m or int(m.group('id')) != newrevid:
483 tagname = 'D%d' % newrevid
487 tagname = 'D%d' % newrevid
484 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
488 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
485 date=None, local=True)
489 date=None, local=True)
486 else:
490 else:
487 # Nothing changed. But still set "newrevid" so the next revision
491 # Nothing changed. But still set "newrevid" so the next revision
488 # could depend on this one.
492 # could depend on this one.
489 newrevid = revid
493 newrevid = revid
490 action = 'skipped'
494 action = 'skipped'
491
495
492 actiondesc = ui.label(
496 actiondesc = ui.label(
493 {'created': _('created'),
497 {'created': _('created'),
494 'skipped': _('skipped'),
498 'skipped': _('skipped'),
495 'updated': _('updated')}[action],
499 'updated': _('updated')}[action],
496 'phabricator.action.%s' % action)
500 'phabricator.action.%s' % action)
497 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
501 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
498 nodedesc = ui.label(bytes(ctx), 'phabricator.node')
502 nodedesc = ui.label(bytes(ctx), 'phabricator.node')
499 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
503 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
500 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
504 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
501 desc))
505 desc))
502 drevids.append(newrevid)
506 drevids.append(newrevid)
503 lastrevid = newrevid
507 lastrevid = newrevid
504
508
505 # Update commit messages and remove tags
509 # Update commit messages and remove tags
506 if opts.get('amend'):
510 if opts.get('amend'):
507 unfi = repo.unfiltered()
511 unfi = repo.unfiltered()
508 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
512 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
509 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
513 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
510 wnode = unfi['.'].node()
514 wnode = unfi['.'].node()
511 mapping = {} # {oldnode: [newnode]}
515 mapping = {} # {oldnode: [newnode]}
512 for i, rev in enumerate(revs):
516 for i, rev in enumerate(revs):
513 old = unfi[rev]
517 old = unfi[rev]
514 drevid = drevids[i]
518 drevid = drevids[i]
515 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
519 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
516 newdesc = getdescfromdrev(drev)
520 newdesc = getdescfromdrev(drev)
517 # Make sure commit message contain "Differential Revision"
521 # Make sure commit message contain "Differential Revision"
518 if old.description() != newdesc:
522 if old.description() != newdesc:
519 parents = [
523 parents = [
520 mapping.get(old.p1().node(), (old.p1(),))[0],
524 mapping.get(old.p1().node(), (old.p1(),))[0],
521 mapping.get(old.p2().node(), (old.p2(),))[0],
525 mapping.get(old.p2().node(), (old.p2(),))[0],
522 ]
526 ]
523 new = context.metadataonlyctx(
527 new = context.metadataonlyctx(
524 repo, old, parents=parents, text=newdesc,
528 repo, old, parents=parents, text=newdesc,
525 user=old.user(), date=old.date(), extra=old.extra())
529 user=old.user(), date=old.date(), extra=old.extra())
526 newnode = new.commit()
530 newnode = new.commit()
527 mapping[old.node()] = [newnode]
531 mapping[old.node()] = [newnode]
528 # Update diff property
532 # Update diff property
529 writediffproperties(unfi[newnode], diffmap[old.node()])
533 writediffproperties(unfi[newnode], diffmap[old.node()])
530 # Remove local tags since it's no longer necessary
534 # Remove local tags since it's no longer necessary
531 tagname = 'D%d' % drevid
535 tagname = 'D%d' % drevid
532 if tagname in repo.tags():
536 if tagname in repo.tags():
533 tags.tag(repo, tagname, nullid, message=None, user=None,
537 tags.tag(repo, tagname, nullid, message=None, user=None,
534 date=None, local=True)
538 date=None, local=True)
535 scmutil.cleanupnodes(repo, mapping, 'phabsend')
539 scmutil.cleanupnodes(repo, mapping, 'phabsend')
536 if wnode in mapping:
540 if wnode in mapping:
537 unfi.setparents(mapping[wnode][0])
541 unfi.setparents(mapping[wnode][0])
538
542
539 # Map from "hg:meta" keys to header understood by "hg import". The order is
543 # Map from "hg:meta" keys to header understood by "hg import". The order is
540 # consistent with "hg export" output.
544 # consistent with "hg export" output.
541 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
545 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
542 (r'node', 'Node ID'), (r'parent', 'Parent ')])
546 (r'node', 'Node ID'), (r'parent', 'Parent ')])
543
547
544 def _confirmbeforesend(repo, revs, oldmap):
548 def _confirmbeforesend(repo, revs, oldmap):
545 url, token = readurltoken(repo)
549 url, token = readurltoken(repo)
546 ui = repo.ui
550 ui = repo.ui
547 for rev in revs:
551 for rev in revs:
548 ctx = repo[rev]
552 ctx = repo[rev]
549 desc = ctx.description().splitlines()[0]
553 desc = ctx.description().splitlines()[0]
550 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
554 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
551 if drevid:
555 if drevid:
552 drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
556 drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
553 else:
557 else:
554 drevdesc = ui.label(_('NEW'), 'phabricator.drev')
558 drevdesc = ui.label(_('NEW'), 'phabricator.drev')
555
559
556 ui.write(_('%s - %s: %s\n') % (drevdesc,
560 ui.write(_('%s - %s: %s\n') % (drevdesc,
557 ui.label(bytes(ctx), 'phabricator.node'),
561 ui.label(bytes(ctx), 'phabricator.node'),
558 ui.label(desc, 'phabricator.desc')))
562 ui.label(desc, 'phabricator.desc')))
559
563
560 if ui.promptchoice(_('Send the above changes to %s (yn)?'
564 if ui.promptchoice(_('Send the above changes to %s (yn)?'
561 '$$ &Yes $$ &No') % url):
565 '$$ &Yes $$ &No') % url):
562 return False
566 return False
563
567
564 return True
568 return True
565
569
566 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
570 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
567 'abandoned'}
571 'abandoned'}
568
572
569 def _getstatusname(drev):
573 def _getstatusname(drev):
570 """get normalized status name from a Differential Revision"""
574 """get normalized status name from a Differential Revision"""
571 return drev[r'statusName'].replace(' ', '').lower()
575 return drev[r'statusName'].replace(' ', '').lower()
572
576
573 # Small language to specify differential revisions. Support symbols: (), :X,
577 # Small language to specify differential revisions. Support symbols: (), :X,
574 # +, and -.
578 # +, and -.
575
579
576 _elements = {
580 _elements = {
577 # token-type: binding-strength, primary, prefix, infix, suffix
581 # token-type: binding-strength, primary, prefix, infix, suffix
578 '(': (12, None, ('group', 1, ')'), None, None),
582 '(': (12, None, ('group', 1, ')'), None, None),
579 ':': (8, None, ('ancestors', 8), None, None),
583 ':': (8, None, ('ancestors', 8), None, None),
580 '&': (5, None, None, ('and_', 5), None),
584 '&': (5, None, None, ('and_', 5), None),
581 '+': (4, None, None, ('add', 4), None),
585 '+': (4, None, None, ('add', 4), None),
582 '-': (4, None, None, ('sub', 4), None),
586 '-': (4, None, None, ('sub', 4), None),
583 ')': (0, None, None, None, None),
587 ')': (0, None, None, None, None),
584 'symbol': (0, 'symbol', None, None, None),
588 'symbol': (0, 'symbol', None, None, None),
585 'end': (0, None, None, None, None),
589 'end': (0, None, None, None, None),
586 }
590 }
587
591
588 def _tokenize(text):
592 def _tokenize(text):
589 view = memoryview(text) # zero-copy slice
593 view = memoryview(text) # zero-copy slice
590 special = '():+-& '
594 special = '():+-& '
591 pos = 0
595 pos = 0
592 length = len(text)
596 length = len(text)
593 while pos < length:
597 while pos < length:
594 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
598 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
595 view[pos:]))
599 view[pos:]))
596 if symbol:
600 if symbol:
597 yield ('symbol', symbol, pos)
601 yield ('symbol', symbol, pos)
598 pos += len(symbol)
602 pos += len(symbol)
599 else: # special char, ignore space
603 else: # special char, ignore space
600 if text[pos] != ' ':
604 if text[pos] != ' ':
601 yield (text[pos], None, pos)
605 yield (text[pos], None, pos)
602 pos += 1
606 pos += 1
603 yield ('end', None, pos)
607 yield ('end', None, pos)
604
608
605 def _parse(text):
609 def _parse(text):
606 tree, pos = parser.parser(_elements).parse(_tokenize(text))
610 tree, pos = parser.parser(_elements).parse(_tokenize(text))
607 if pos != len(text):
611 if pos != len(text):
608 raise error.ParseError('invalid token', pos)
612 raise error.ParseError('invalid token', pos)
609 return tree
613 return tree
610
614
611 def _parsedrev(symbol):
615 def _parsedrev(symbol):
612 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
616 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
613 if symbol.startswith('D') and symbol[1:].isdigit():
617 if symbol.startswith('D') and symbol[1:].isdigit():
614 return int(symbol[1:])
618 return int(symbol[1:])
615 if symbol.isdigit():
619 if symbol.isdigit():
616 return int(symbol)
620 return int(symbol)
617
621
618 def _prefetchdrevs(tree):
622 def _prefetchdrevs(tree):
619 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
623 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
620 drevs = set()
624 drevs = set()
621 ancestordrevs = set()
625 ancestordrevs = set()
622 op = tree[0]
626 op = tree[0]
623 if op == 'symbol':
627 if op == 'symbol':
624 r = _parsedrev(tree[1])
628 r = _parsedrev(tree[1])
625 if r:
629 if r:
626 drevs.add(r)
630 drevs.add(r)
627 elif op == 'ancestors':
631 elif op == 'ancestors':
628 r, a = _prefetchdrevs(tree[1])
632 r, a = _prefetchdrevs(tree[1])
629 drevs.update(r)
633 drevs.update(r)
630 ancestordrevs.update(r)
634 ancestordrevs.update(r)
631 ancestordrevs.update(a)
635 ancestordrevs.update(a)
632 else:
636 else:
633 for t in tree[1:]:
637 for t in tree[1:]:
634 r, a = _prefetchdrevs(t)
638 r, a = _prefetchdrevs(t)
635 drevs.update(r)
639 drevs.update(r)
636 ancestordrevs.update(a)
640 ancestordrevs.update(a)
637 return drevs, ancestordrevs
641 return drevs, ancestordrevs
638
642
639 def querydrev(repo, spec):
643 def querydrev(repo, spec):
640 """return a list of "Differential Revision" dicts
644 """return a list of "Differential Revision" dicts
641
645
642 spec is a string using a simple query language, see docstring in phabread
646 spec is a string using a simple query language, see docstring in phabread
643 for details.
647 for details.
644
648
645 A "Differential Revision dict" looks like:
649 A "Differential Revision dict" looks like:
646
650
647 {
651 {
648 "id": "2",
652 "id": "2",
649 "phid": "PHID-DREV-672qvysjcczopag46qty",
653 "phid": "PHID-DREV-672qvysjcczopag46qty",
650 "title": "example",
654 "title": "example",
651 "uri": "https://phab.example.com/D2",
655 "uri": "https://phab.example.com/D2",
652 "dateCreated": "1499181406",
656 "dateCreated": "1499181406",
653 "dateModified": "1499182103",
657 "dateModified": "1499182103",
654 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
658 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
655 "status": "0",
659 "status": "0",
656 "statusName": "Needs Review",
660 "statusName": "Needs Review",
657 "properties": [],
661 "properties": [],
658 "branch": null,
662 "branch": null,
659 "summary": "",
663 "summary": "",
660 "testPlan": "",
664 "testPlan": "",
661 "lineCount": "2",
665 "lineCount": "2",
662 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
666 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
663 "diffs": [
667 "diffs": [
664 "3",
668 "3",
665 "4",
669 "4",
666 ],
670 ],
667 "commits": [],
671 "commits": [],
668 "reviewers": [],
672 "reviewers": [],
669 "ccs": [],
673 "ccs": [],
670 "hashes": [],
674 "hashes": [],
671 "auxiliary": {
675 "auxiliary": {
672 "phabricator:projects": [],
676 "phabricator:projects": [],
673 "phabricator:depends-on": [
677 "phabricator:depends-on": [
674 "PHID-DREV-gbapp366kutjebt7agcd"
678 "PHID-DREV-gbapp366kutjebt7agcd"
675 ]
679 ]
676 },
680 },
677 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
681 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
678 "sourcePath": null
682 "sourcePath": null
679 }
683 }
680 """
684 """
681 def fetch(params):
685 def fetch(params):
682 """params -> single drev or None"""
686 """params -> single drev or None"""
683 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
687 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
684 if key in prefetched:
688 if key in prefetched:
685 return prefetched[key]
689 return prefetched[key]
686 drevs = callconduit(repo, 'differential.query', params)
690 drevs = callconduit(repo, 'differential.query', params)
687 # Fill prefetched with the result
691 # Fill prefetched with the result
688 for drev in drevs:
692 for drev in drevs:
689 prefetched[drev[r'phid']] = drev
693 prefetched[drev[r'phid']] = drev
690 prefetched[int(drev[r'id'])] = drev
694 prefetched[int(drev[r'id'])] = drev
691 if key not in prefetched:
695 if key not in prefetched:
692 raise error.Abort(_('cannot get Differential Revision %r') % params)
696 raise error.Abort(_('cannot get Differential Revision %r') % params)
693 return prefetched[key]
697 return prefetched[key]
694
698
695 def getstack(topdrevids):
699 def getstack(topdrevids):
696 """given a top, get a stack from the bottom, [id] -> [id]"""
700 """given a top, get a stack from the bottom, [id] -> [id]"""
697 visited = set()
701 visited = set()
698 result = []
702 result = []
699 queue = [{r'ids': [i]} for i in topdrevids]
703 queue = [{r'ids': [i]} for i in topdrevids]
700 while queue:
704 while queue:
701 params = queue.pop()
705 params = queue.pop()
702 drev = fetch(params)
706 drev = fetch(params)
703 if drev[r'id'] in visited:
707 if drev[r'id'] in visited:
704 continue
708 continue
705 visited.add(drev[r'id'])
709 visited.add(drev[r'id'])
706 result.append(int(drev[r'id']))
710 result.append(int(drev[r'id']))
707 auxiliary = drev.get(r'auxiliary', {})
711 auxiliary = drev.get(r'auxiliary', {})
708 depends = auxiliary.get(r'phabricator:depends-on', [])
712 depends = auxiliary.get(r'phabricator:depends-on', [])
709 for phid in depends:
713 for phid in depends:
710 queue.append({'phids': [phid]})
714 queue.append({'phids': [phid]})
711 result.reverse()
715 result.reverse()
712 return smartset.baseset(result)
716 return smartset.baseset(result)
713
717
714 # Initialize prefetch cache
718 # Initialize prefetch cache
715 prefetched = {} # {id or phid: drev}
719 prefetched = {} # {id or phid: drev}
716
720
717 tree = _parse(spec)
721 tree = _parse(spec)
718 drevs, ancestordrevs = _prefetchdrevs(tree)
722 drevs, ancestordrevs = _prefetchdrevs(tree)
719
723
720 # developer config: phabricator.batchsize
724 # developer config: phabricator.batchsize
721 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
725 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
722
726
723 # Prefetch Differential Revisions in batch
727 # Prefetch Differential Revisions in batch
724 tofetch = set(drevs)
728 tofetch = set(drevs)
725 for r in ancestordrevs:
729 for r in ancestordrevs:
726 tofetch.update(range(max(1, r - batchsize), r + 1))
730 tofetch.update(range(max(1, r - batchsize), r + 1))
727 if drevs:
731 if drevs:
728 fetch({r'ids': list(tofetch)})
732 fetch({r'ids': list(tofetch)})
729 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
733 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
730
734
731 # Walk through the tree, return smartsets
735 # Walk through the tree, return smartsets
732 def walk(tree):
736 def walk(tree):
733 op = tree[0]
737 op = tree[0]
734 if op == 'symbol':
738 if op == 'symbol':
735 drev = _parsedrev(tree[1])
739 drev = _parsedrev(tree[1])
736 if drev:
740 if drev:
737 return smartset.baseset([drev])
741 return smartset.baseset([drev])
738 elif tree[1] in _knownstatusnames:
742 elif tree[1] in _knownstatusnames:
739 drevs = [r for r in validids
743 drevs = [r for r in validids
740 if _getstatusname(prefetched[r]) == tree[1]]
744 if _getstatusname(prefetched[r]) == tree[1]]
741 return smartset.baseset(drevs)
745 return smartset.baseset(drevs)
742 else:
746 else:
743 raise error.Abort(_('unknown symbol: %s') % tree[1])
747 raise error.Abort(_('unknown symbol: %s') % tree[1])
744 elif op in {'and_', 'add', 'sub'}:
748 elif op in {'and_', 'add', 'sub'}:
745 assert len(tree) == 3
749 assert len(tree) == 3
746 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
750 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
747 elif op == 'group':
751 elif op == 'group':
748 return walk(tree[1])
752 return walk(tree[1])
749 elif op == 'ancestors':
753 elif op == 'ancestors':
750 return getstack(walk(tree[1]))
754 return getstack(walk(tree[1]))
751 else:
755 else:
752 raise error.ProgrammingError('illegal tree: %r' % tree)
756 raise error.ProgrammingError('illegal tree: %r' % tree)
753
757
754 return [prefetched[r] for r in walk(tree)]
758 return [prefetched[r] for r in walk(tree)]
755
759
756 def getdescfromdrev(drev):
760 def getdescfromdrev(drev):
757 """get description (commit message) from "Differential Revision"
761 """get description (commit message) from "Differential Revision"
758
762
759 This is similar to differential.getcommitmessage API. But we only care
763 This is similar to differential.getcommitmessage API. But we only care
760 about limited fields: title, summary, test plan, and URL.
764 about limited fields: title, summary, test plan, and URL.
761 """
765 """
762 title = drev[r'title']
766 title = drev[r'title']
763 summary = drev[r'summary'].rstrip()
767 summary = drev[r'summary'].rstrip()
764 testplan = drev[r'testPlan'].rstrip()
768 testplan = drev[r'testPlan'].rstrip()
765 if testplan:
769 if testplan:
766 testplan = 'Test Plan:\n%s' % testplan
770 testplan = 'Test Plan:\n%s' % testplan
767 uri = 'Differential Revision: %s' % drev[r'uri']
771 uri = 'Differential Revision: %s' % drev[r'uri']
768 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
772 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
769
773
770 def getdiffmeta(diff):
774 def getdiffmeta(diff):
771 """get commit metadata (date, node, user, p1) from a diff object
775 """get commit metadata (date, node, user, p1) from a diff object
772
776
773 The metadata could be "hg:meta", sent by phabsend, like:
777 The metadata could be "hg:meta", sent by phabsend, like:
774
778
775 "properties": {
779 "properties": {
776 "hg:meta": {
780 "hg:meta": {
777 "date": "1499571514 25200",
781 "date": "1499571514 25200",
778 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
782 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
779 "user": "Foo Bar <foo@example.com>",
783 "user": "Foo Bar <foo@example.com>",
780 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
784 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
781 }
785 }
782 }
786 }
783
787
784 Or converted from "local:commits", sent by "arc", like:
788 Or converted from "local:commits", sent by "arc", like:
785
789
786 "properties": {
790 "properties": {
787 "local:commits": {
791 "local:commits": {
788 "98c08acae292b2faf60a279b4189beb6cff1414d": {
792 "98c08acae292b2faf60a279b4189beb6cff1414d": {
789 "author": "Foo Bar",
793 "author": "Foo Bar",
790 "time": 1499546314,
794 "time": 1499546314,
791 "branch": "default",
795 "branch": "default",
792 "tag": "",
796 "tag": "",
793 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
797 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
794 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
798 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
795 "local": "1000",
799 "local": "1000",
796 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
800 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
797 "summary": "...",
801 "summary": "...",
798 "message": "...",
802 "message": "...",
799 "authorEmail": "foo@example.com"
803 "authorEmail": "foo@example.com"
800 }
804 }
801 }
805 }
802 }
806 }
803
807
804 Note: metadata extracted from "local:commits" will lose time zone
808 Note: metadata extracted from "local:commits" will lose time zone
805 information.
809 information.
806 """
810 """
807 props = diff.get(r'properties') or {}
811 props = diff.get(r'properties') or {}
808 meta = props.get(r'hg:meta')
812 meta = props.get(r'hg:meta')
809 if not meta and props.get(r'local:commits'):
813 if not meta and props.get(r'local:commits'):
810 commit = sorted(props[r'local:commits'].values())[0]
814 commit = sorted(props[r'local:commits'].values())[0]
811 meta = {
815 meta = {
812 r'date': r'%d 0' % commit[r'time'],
816 r'date': r'%d 0' % commit[r'time'],
813 r'node': commit[r'rev'],
817 r'node': commit[r'rev'],
814 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
818 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
815 }
819 }
816 if len(commit.get(r'parents', ())) >= 1:
820 if len(commit.get(r'parents', ())) >= 1:
817 meta[r'parent'] = commit[r'parents'][0]
821 meta[r'parent'] = commit[r'parents'][0]
818 return meta or {}
822 return meta or {}
819
823
820 def readpatch(repo, drevs, write):
824 def readpatch(repo, drevs, write):
821 """generate plain-text patch readable by 'hg import'
825 """generate plain-text patch readable by 'hg import'
822
826
823 write is usually ui.write. drevs is what "querydrev" returns, results of
827 write is usually ui.write. drevs is what "querydrev" returns, results of
824 "differential.query".
828 "differential.query".
825 """
829 """
826 # Prefetch hg:meta property for all diffs
830 # Prefetch hg:meta property for all diffs
827 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
831 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
828 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
832 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
829
833
830 # Generate patch for each drev
834 # Generate patch for each drev
831 for drev in drevs:
835 for drev in drevs:
832 repo.ui.note(_('reading D%s\n') % drev[r'id'])
836 repo.ui.note(_('reading D%s\n') % drev[r'id'])
833
837
834 diffid = max(int(v) for v in drev[r'diffs'])
838 diffid = max(int(v) for v in drev[r'diffs'])
835 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
839 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
836 desc = getdescfromdrev(drev)
840 desc = getdescfromdrev(drev)
837 header = '# HG changeset patch\n'
841 header = '# HG changeset patch\n'
838
842
839 # Try to preserve metadata from hg:meta property. Write hg patch
843 # Try to preserve metadata from hg:meta property. Write hg patch
840 # headers that can be read by the "import" command. See patchheadermap
844 # headers that can be read by the "import" command. See patchheadermap
841 # and extract in mercurial/patch.py for supported headers.
845 # and extract in mercurial/patch.py for supported headers.
842 meta = getdiffmeta(diffs[str(diffid)])
846 meta = getdiffmeta(diffs[str(diffid)])
843 for k in _metanamemap.keys():
847 for k in _metanamemap.keys():
844 if k in meta:
848 if k in meta:
845 header += '# %s %s\n' % (_metanamemap[k], meta[k])
849 header += '# %s %s\n' % (_metanamemap[k], meta[k])
846
850
847 content = '%s%s\n%s' % (header, desc, body)
851 content = '%s%s\n%s' % (header, desc, body)
848 write(encoding.unitolocal(content))
852 write(encoding.unitolocal(content))
849
853
850 @command('phabread',
854 @command('phabread',
851 [('', 'stack', False, _('read dependencies'))],
855 [('', 'stack', False, _('read dependencies'))],
852 _('DREVSPEC [OPTIONS]'))
856 _('DREVSPEC [OPTIONS]'))
853 def phabread(ui, repo, spec, **opts):
857 def phabread(ui, repo, spec, **opts):
854 """print patches from Phabricator suitable for importing
858 """print patches from Phabricator suitable for importing
855
859
856 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
860 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
857 the number ``123``. It could also have common operators like ``+``, ``-``,
861 the number ``123``. It could also have common operators like ``+``, ``-``,
858 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
862 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
859 select a stack.
863 select a stack.
860
864
861 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
865 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
862 could be used to filter patches by status. For performance reason, they
866 could be used to filter patches by status. For performance reason, they
863 only represent a subset of non-status selections and cannot be used alone.
867 only represent a subset of non-status selections and cannot be used alone.
864
868
865 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
869 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
866 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
870 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
867 stack up to D9.
871 stack up to D9.
868
872
869 If --stack is given, follow dependencies information and read all patches.
873 If --stack is given, follow dependencies information and read all patches.
870 It is equivalent to the ``:`` operator.
874 It is equivalent to the ``:`` operator.
871 """
875 """
872 if opts.get('stack'):
876 if opts.get('stack'):
873 spec = ':(%s)' % spec
877 spec = ':(%s)' % spec
874 drevs = querydrev(repo, spec)
878 drevs = querydrev(repo, spec)
875 readpatch(repo, drevs, ui.write)
879 readpatch(repo, drevs, ui.write)
876
880
877 @command('phabupdate',
881 @command('phabupdate',
878 [('', 'accept', False, _('accept revisions')),
882 [('', 'accept', False, _('accept revisions')),
879 ('', 'reject', False, _('reject revisions')),
883 ('', 'reject', False, _('reject revisions')),
880 ('', 'abandon', False, _('abandon revisions')),
884 ('', 'abandon', False, _('abandon revisions')),
881 ('', 'reclaim', False, _('reclaim revisions')),
885 ('', 'reclaim', False, _('reclaim revisions')),
882 ('m', 'comment', '', _('comment on the last revision')),
886 ('m', 'comment', '', _('comment on the last revision')),
883 ], _('DREVSPEC [OPTIONS]'))
887 ], _('DREVSPEC [OPTIONS]'))
884 def phabupdate(ui, repo, spec, **opts):
888 def phabupdate(ui, repo, spec, **opts):
885 """update Differential Revision in batch
889 """update Differential Revision in batch
886
890
887 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
891 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
888 """
892 """
889 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
893 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
890 if len(flags) > 1:
894 if len(flags) > 1:
891 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
895 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
892
896
893 actions = []
897 actions = []
894 for f in flags:
898 for f in flags:
895 actions.append({'type': f, 'value': 'true'})
899 actions.append({'type': f, 'value': 'true'})
896
900
897 drevs = querydrev(repo, spec)
901 drevs = querydrev(repo, spec)
898 for i, drev in enumerate(drevs):
902 for i, drev in enumerate(drevs):
899 if i + 1 == len(drevs) and opts.get('comment'):
903 if i + 1 == len(drevs) and opts.get('comment'):
900 actions.append({'type': 'comment', 'value': opts['comment']})
904 actions.append({'type': 'comment', 'value': opts['comment']})
901 if actions:
905 if actions:
902 params = {'objectIdentifier': drev[r'phid'],
906 params = {'objectIdentifier': drev[r'phid'],
903 'transactions': actions}
907 'transactions': actions}
904 callconduit(repo, 'differential.revision.edit', params)
908 callconduit(repo, 'differential.revision.edit', params)
905
909
906 templatekeyword = registrar.templatekeyword()
910 templatekeyword = registrar.templatekeyword()
907
911
908 @templatekeyword('phabreview', requires={'ctx'})
912 @templatekeyword('phabreview', requires={'ctx'})
909 def template_review(context, mapping):
913 def template_review(context, mapping):
910 """:phabreview: Object describing the review for this changeset.
914 """:phabreview: Object describing the review for this changeset.
911 Has attributes `url` and `id`.
915 Has attributes `url` and `id`.
912 """
916 """
913 ctx = context.resource(mapping, 'ctx')
917 ctx = context.resource(mapping, 'ctx')
914 m = _differentialrevisiondescre.search(ctx.description())
918 m = _differentialrevisiondescre.search(ctx.description())
915 if m:
919 if m:
916 return {
920 return {
917 'url': m.group('url'),
921 'url': m.group('url'),
918 'id': "D{}".format(m.group('id')),
922 'id': "D{}".format(m.group('id')),
919 }
923 }
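
For reference, the change in this revision makes readlegacytoken() emit its deprecation warning at most once per process by guarding the ui.warn() call with a module-level flag. A minimal standalone sketch of that warn-once pattern, using a hypothetical warn_deprecated() helper that is not part of the extension:

# Illustrative sketch only, not part of phabricator.py: the same
# "print the warning only once" guard reduced to its essentials.
_warned = False

def warn_deprecated(ui):
    """Print the deprecation notice on the first call, stay silent after."""
    global _warned
    if not _warned:
        _warned = True
        ui.warn('phabricator.token is deprecated - please '
                'migrate to the phabricator.auth section.\n')

Because the flag lives at module level, repeated calls within a single command invocation (for example, once per Conduit API call, since callconduit() re-reads the token each time) produce one warning, while a fresh process starts with the flag reset.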