phabricator: add a config to use curl for communication...
Jun Wu
r34066:8b659b73 default
@@ -1,854 +1,867
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

By default, Phabricator requires a ``Test Plan``, which might prevent some
changesets from being sent. The requirement can be disabled by changing the
``differential.require-test-plan-field`` config on the server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # API token. Get it from https://$HOST/conduit/login/
    token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

+    # curl command to use. If not set (default), use the builtin HTTP library
+    # to communicate. If set, use the specified curl command. This could be
+    # useful if you need to specify advanced options that are not easily
+    # supported by the internal library.
+    curlcmd = curl --connect-timeout 2 --retry 3 --silent
"""

from __future__ import absolute_import

import itertools
import json
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    mdiff,
    obsutil,
    parser,
    patch,
    registrar,
    scmutil,
    smartset,
    tags,
    url as urlmod,
    util,
)

cmdtable = {}
command = registrar.command(cmdtable)

colortable = {
    'phabricator.action.created': 'green',
    'phabricator.action.skipped': 'magenta',
    'phabricator.action.updated': 'magenta',
    'phabricator.desc': '',
    'phabricator.drev': 'bold',
    'phabricator.node': '',
}

def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def process(prefix, obj):
        items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process('%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
    process('', params)
    return util.urlreq.urlencode(flatparams)

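# Illustrative note (not part of the original change): a hypothetical call
# such as urlencodenested({'constraints': {'callsigns': ['FOO']}}) would be
# flattened to {'constraints[callsigns][0]': 'FOO'} before being URL-encoded,
# matching PHP's http_build_query as described in the docstring above.
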
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [phabricator] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    values = []
    section = 'phabricator'
    for name in ['url', 'token']:
        value = repo.ui.config(section, name)
        if not value:
            raise error.Abort(_('config %s.%s is required') % (section, name))
        values.append(value)
    return values

def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
-    urlopener = urlmod.opener(repo.ui, authinfo)
    repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
    params = params.copy()
    params['api.token'] = token
-    request = util.urlreq.request(url, data=urlencodenested(params))
-    body = urlopener.open(request).read()
+    data = urlencodenested(params)
+    curlcmd = repo.ui.config('phabricator', 'curlcmd')
+    if curlcmd:
+        sin, sout = util.popen2('%s -d @- %s' % (curlcmd, util.shellquote(url)))
+        sin.write(data)
+        sin.close()
+        body = sout.read()
+    else:
+        urlopener = urlmod.opener(repo.ui, authinfo)
+        request = util.urlreq.request(url, data=data)
+        body = urlopener.open(request).read()
    repo.ui.debug('Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    if parsed.get(r'error_code'):
        msg = (_('Conduit Error (%s): %s')
               % (parsed[r'error_code'], parsed[r'error_info']))
        raise error.Abort(msg)
    return parsed[r'result']

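# Illustrative sketch (not part of the original change), assuming the example
# config from the module docstring: with
#   curlcmd = curl --connect-timeout 2 --retry 3 --silent
# a call like callconduit(repo, 'conduit.ping', {}) shells out to roughly
#   curl --connect-timeout 2 --retry 3 --silent -d @- 'https://phab.example.com/api/conduit.ping'
# and writes the form-encoded parameters (including api.token) to curl's
# stdin, whereas the default path keeps using the builtin urllib-based opener.
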
@command('debugcallconduit', [], _('METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
    ui.write('%s\n' % s)

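# Example usage (illustrative, assuming a configured [phabricator] section):
#   $ echo '{}' | hg debugcallconduit conduit.ping
# prints the JSON result of the "conduit.ping" API call to stdout.
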
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config('phabricator', 'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config('phabricator', 'callsign')
    if not callsign:
        return None
    query = callconduit(repo, 'diffusion.repository.search',
                        {'constraints': {'callsigns': [callsign]}})
    if len(query[r'data']) == 0:
        return None
    repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
    repo.ui.setconfig('phabricator', 'repophid', repophid)
    return repophid

_differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    '^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$', re.M)

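# Illustrative examples (not part of the original change) of what the two
# patterns above match:
#   _differentialrevisiontagre matches local tags such as "D123" (the whole
#   tag), capturing the revision number.
#   _differentialrevisiondescre matches a commit message line such as
#   "Differential Revision: https://phab.example.com/D123", also capturing
#   the trailing number.
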
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previously sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If no such commit message line is found, examines all precursors and their
    tags. Tags with a format like "D1234" are considered a match; the node
    with that tag, and the number after "D" (ex. 1234), will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    url, token = readurltoken(repo)
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {} # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {} # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(1)))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expecting precursors to overlap with them.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, 'differential.querydiffs',
                               {'revisionIDs': drevs})
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', ''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = 'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_('D%s: local tag removed - does not match '
                               'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

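# Illustrative shape of the return value (hypothetical values): a changeset
# previously sent as D12 might map as
#   {newnode: (oldnode, lastdiff, 12)}
# where oldnode and lastdiff may be None when only the Differential Revision
# ID could be recovered from the commit message or local tags.
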
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                                      None, opts=diffopts):
        output.write(chunk)
    return output.getvalue()

def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params['repositoryPHID'] = repophid
    diff = callconduit(repo, 'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_('cannot create diff for %s') % ctx)
    return diff

def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    params = {
        'diff_id': diff[r'id'],
        'name': 'hg:meta',
        'data': json.dumps({
            'user': ctx.user(),
            'date': '%d %d' % ctx.date(),
            'node': ctx.hex(),
            'parent': ctx.p1().hex(),
        }),
    }
    callconduit(ctx.repo(), 'differential.setdiffproperty', params)

def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({'type': 'update', 'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # has not changed, we might still need to update its metadata so
        # pushers know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churn (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = 'Depends on D%s' % parentrevid
        transactions += [{'type': 'summary', 'value': summary},
                         {'type': 'summary', 'value': ' '}]

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, 'differential.parsecommitmessage',
                       {'corpus': desc})
    for k, v in info[r'fields'].items():
        if k in ['title', 'summary', 'testPlan']:
            transactions.append({'type': k, 'value': v})

    params = {'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params['objectIdentifier'] = revid

    revision = callconduit(repo, 'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_('cannot create revision for %s') % ctx)

    return revision, diff

def userphids(repo, names):
    """convert user names to PHIDs"""
    query = {'constraints': {'usernames': names}}
    result = callconduit(repo, 'user.search', query)
    # A username that is not found is not an API error, so check whether we
    # missed any names here.
    data = result[r'data']
    resolved = set(entry[r'fields'][r'username'] for entry in data)
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(_('unknown username: %s')
                          % ' '.join(sorted(unresolved)))
    return [entry[r'phid'] for entry in data]

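# Illustrative example (hypothetical names): userphids(repo, ['alice', 'bob'])
# returns the corresponding "PHID-USER-..." identifiers, and aborts if any of
# the usernames cannot be resolved by the "user.search" API.
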
@command('phabsend',
         [('r', 'rev', [], _('revisions to send'), _('REV')),
          ('', 'amend', True, _('update commit messages')),
          ('', 'reviewer', [], _('specify reviewers')),
          ('', 'confirm', None, _('ask for confirmation before sending'))],
         _('REV [OPTIONS]'))
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If multiple revisions are specified, they will be sent as a stack with
    linear dependency relationships, in the order specified by the revset.

    The first time changesets are uploaded, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove the related tags. This is
    similar to what arcanist does, and is preferred in author-push workflows.
    Otherwise, use local tags to record the ``Differential Revision``
    association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    revs = list(revs) + opts.get('rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_('phabsend requires at least one changeset'))
    if opts.get('amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool('phabsend', 'confirm')
    confirm |= bool(opts.get('confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_('phabsend cancelled'))

    actions = []
    reviewers = opts.get('reviewer', [])
    if reviewers:
        phids = userphids(repo, reviewers)
        actions.append({'type': 'reviewers.add', 'value': phids})

    drevids = [] # [int]
    diffmap = {} # {newnode: diff}

    # Send patches one by one so we know their Differential Revision IDs and
    # can provide dependency relationship
    lastrevid = None
    for rev in revs:
        ui.debug('sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get('amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx, revid, lastrevid, oldnode, olddiff, actions)
            diffmap[ctx.node()] = diff
            newrevid = int(revision[r'object'][r'id'])
            if revid:
                action = 'updated'
            else:
                action = 'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(1)) != newrevid:
                tagname = 'D%d' % newrevid
                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
                         date=None, local=True)
        else:
            # Nothing changed. But still set "newrevid" so the next revision
            # could depend on this one.
            newrevid = revid
            action = 'skipped'

        actiondesc = ui.label(
            {'created': _('created'),
             'skipped': _('skipped'),
             'updated': _('updated')}[action],
            'phabricator.action.%s' % action)
        drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), 'phabricator.node')
        desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
        ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
                                            desc))
        drevids.append(newrevid)
        lastrevid = newrevid

    # Update commit messages and remove tags
    if opts.get('amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(repo, 'differential.query', {'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
            wnode = unfi['.'].node()
            mapping = {} # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[r'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure the commit message contains "Differential Revision"
                if old.description() != newdesc:
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo, old, parents=parents, text=newdesc,
                        user=old.user(), date=old.date(), extra=old.extra())
                    newnode = new.commit()
                    mapping[old.node()] = [newnode]
                    # Update diff property
                    writediffproperties(unfi[newnode], diffmap[old.node()])
                # Remove local tags since they are no longer necessary
                tagname = 'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(repo, tagname, nullid, message=None, user=None,
                             date=None, local=True)
            scmutil.cleanupnodes(repo, mapping, 'phabsend')
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])

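# Example invocations (illustrative):
#   $ hg phabsend -r .                     # send the working-copy parent
#   $ hg phabsend -r '.^::.' --reviewer alice --confirm
# The second form sends a two-changeset stack, adds a hypothetical reviewer
# "alice", and asks for confirmation before talking to the server.
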
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
                              (r'node', 'Node ID'), (r'parent', 'Parent ')])

def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
        else:
            drevdesc = ui.label(_('NEW'), 'phabricator.drev')

        ui.write(_('%s - %s: %s\n') % (drevdesc,
                                       ui.label(bytes(ctx), 'phabricator.node'),
                                       ui.label(desc, 'phabricator.desc')))

    if ui.promptchoice(_('Send the above changes to %s (yn)?'
                         '$$ &Yes $$ &No') % url):
        return False

    return True

_knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
                     'abandoned'}

def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[r'statusName'].replace(' ', '').lower()

# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    '(': (12, None, ('group', 1, ')'), None, None),
    ':': (8, None, ('ancestors', 8), None, None),
    '&': (5, None, None, ('and_', 5), None),
    '+': (4, None, None, ('add', 4), None),
    '-': (4, None, None, ('sub', 4), None),
    ')': (0, None, None, None, None),
    'symbol': (0, 'symbol', None, None, None),
    'end': (0, None, None, None, None),
}

def _tokenize(text):
    view = memoryview(text) # zero-copy slice
    special = '():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
                                             view[pos:]))
        if symbol:
            yield ('symbol', symbol, pos)
            pos += len(symbol)
        else: # special char, ignore space
            if text[pos] != ' ':
                yield (text[pos], None, pos)
            pos += 1
    yield ('end', None, pos)

def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError('invalid token', pos)
    return tree

def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith('D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)

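# Illustrative examples (not part of the original change) for the small query
# language implemented above:
#   _parsedrev('D45') -> 45, _parsedrev('12') -> 12, _parsedrev('x') -> None
#   ':D6+8-(2+D4)' selects the stack up to D6, plus D8, minus D2 and D4
#   (see the phabread docstring below for the user-facing description).
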
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == 'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == 'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs

def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo, 'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[r'phid']] = drev
            prefetched[int(drev[r'id'])] = drev
        if key not in prefetched:
            raise error.Abort(_('cannot get Differential Revision %r') % params)
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{r'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[r'id'] in visited:
                continue
            visited.add(drev[r'id'])
            result.append(int(drev[r'id']))
            auxiliary = drev.get(r'auxiliary', {})
            depends = auxiliary.get(r'phabricator:depends-on', [])
            for phid in depends:
                queue.append({'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {} # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint('phabricator', 'batchsize', 12)

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({r'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == 'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [r for r in validids
                         if _getstatusname(prefetched[r]) == tree[1]]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_('unknown symbol: %s') % tree[1])
        elif op in {'and_', 'add', 'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == 'group':
            return walk(tree[1])
        elif op == 'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError('illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]

def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[r'title']
    summary = drev[r'summary'].rstrip()
    testplan = drev[r'testPlan'].rstrip()
    if testplan:
        testplan = 'Test Plan:\n%s' % testplan
    uri = 'Differential Revision: %s' % drev[r'uri']
    return '\n\n'.join(filter(None, [title, summary, testplan, uri]))

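# Illustrative result (hypothetical revision): for a revision titled "example"
# with a summary, a test plan and URL https://phab.example.com/D2, the
# reconstructed commit message looks like:
#   example
#
#   <summary text>
#
#   Test Plan:
#   <test plan text>
#
#   Differential Revision: https://phab.example.com/D2
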
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(r'properties') or {}
    meta = props.get(r'hg:meta')
    if not meta and props.get(r'local:commits'):
        commit = sorted(props[r'local:commits'].values())[0]
        meta = {
            r'date': r'%d 0' % commit[r'time'],
            r'node': commit[r'rev'],
            r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
        }
        if len(commit.get(r'parents', ())) >= 1:
            meta[r'parent'] = commit[r'parents'][0]
    return meta or {}

def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
    diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_('reading D%s\n') % drev[r'id'])

        diffid = max(int(v) for v in drev[r'diffs'])
        body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = '# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[str(diffid)])
        for k in _metanamemap.keys():
            if k in meta:
                header += '# %s %s\n' % (_metanamemap[k], meta[k])

        content = '%s%s\n%s' % (header, desc, body)
        write(encoding.unitolocal(content))

@command('phabread',
         [('', 'stack', False, _('read dependencies'))],
         _('DREVSPEC [OPTIONS]'))
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    if opts.get('stack'):
        spec = ':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)

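# Example usage (illustrative):
#   $ hg phabread D123 | hg import -
#   $ hg phabread --stack D123          # also read everything D123 depends on
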
@command('phabupdate',
         [('', 'accept', False, _('accept revisions')),
          ('', 'reject', False, _('reject revisions')),
          ('', 'abandon', False, _('abandon revisions')),
          ('', 'reclaim', False, _('reclaim revisions')),
          ('m', 'comment', '', _('comment on the last revision')),
         ], _('DREVSPEC [OPTIONS]'))
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_('%s cannot be used together') % ', '.join(flags))

    actions = []
    for f in flags:
        actions.append({'type': f, 'value': 'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get('comment'):
            actions.append({'type': 'comment', 'value': opts['comment']})
        if actions:
            params = {'objectIdentifier': drev[r'phid'],
                      'transactions': actions}
            callconduit(repo, 'differential.revision.edit', params)
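
# Example usage (illustrative):
#   $ hg phabupdate --accept :D123 -m 'LGTM'
# accepts the stack up to D123 and leaves the comment on the last revision of
# the selected set.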