phabsend: make --amend the default...
Jun Wu
r33977:07ffff84 default
@@ -1,829 +1,829 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration
7 """simple Phabricator integration
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 - changesets to Phabricator without amending commit messages, and a ``phabread``
11 - command which prints a stack of revisions in a format suitable
12 - for :hg:`import`, and a ``phabupdate`` command to update statuses in batch.
10 + changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 + revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 + to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config on the server side.
16 ``differential.require-test-plan-field`` config on the server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # API token. Get it from https://$HOST/conduit/login/
24 # API token. Get it from https://$HOST/conduit/login/
25 token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
25 token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
26
26
27 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
27 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # callsign is "FOO".
28 # callsign is "FOO".
29 callsign = FOO
29 callsign = FOO
30
30
31 """
31 """
32
32
33 from __future__ import absolute_import
33 from __future__ import absolute_import
34
34
35 import itertools
35 import itertools
36 import json
36 import json
37 import operator
37 import operator
38 import re
38 import re
39
39
40 from mercurial.node import bin, nullid
40 from mercurial.node import bin, nullid
41 from mercurial.i18n import _
41 from mercurial.i18n import _
42 from mercurial import (
42 from mercurial import (
43 cmdutil,
43 cmdutil,
44 context,
44 context,
45 encoding,
45 encoding,
46 error,
46 error,
47 mdiff,
47 mdiff,
48 obsutil,
48 obsutil,
49 parser,
49 parser,
50 patch,
50 patch,
51 registrar,
51 registrar,
52 scmutil,
52 scmutil,
53 smartset,
53 smartset,
54 tags,
54 tags,
55 url as urlmod,
55 url as urlmod,
56 util,
56 util,
57 )
57 )
58
58
59 cmdtable = {}
59 cmdtable = {}
60 command = registrar.command(cmdtable)
60 command = registrar.command(cmdtable)
61
61
62 def urlencodenested(params):
62 def urlencodenested(params):
63 """like urlencode, but works with nested parameters.
63 """like urlencode, but works with nested parameters.
64
64
65 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
65 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
66 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
66 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
67 urlencode. Note: the encoding is consistent with PHP's http_build_query.
67 urlencode. Note: the encoding is consistent with PHP's http_build_query.
68 """
68 """
69 flatparams = util.sortdict()
69 flatparams = util.sortdict()
70 def process(prefix, obj):
70 def process(prefix, obj):
71 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
71 items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
72 if items is None:
72 if items is None:
73 flatparams[prefix] = obj
73 flatparams[prefix] = obj
74 else:
74 else:
75 for k, v in items(obj):
75 for k, v in items(obj):
76 if prefix:
76 if prefix:
77 process('%s[%s]' % (prefix, k), v)
77 process('%s[%s]' % (prefix, k), v)
78 else:
78 else:
79 process(k, v)
79 process(k, v)
80 process('', params)
80 process('', params)
81 return util.urlreq.urlencode(flatparams)
81 return util.urlreq.urlencode(flatparams)
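
# A small sketch of the flattening described above (bracket characters are
# percent-encoded by urlencode; order follows the input dict's iteration):
#
#   urlencodenested({'a': ['b', 'c'], 'd': {'e': 'f'}})
#   # -> 'a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f'  (i.e. a[0]=b&a[1]=c&d[e]=f)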
82
82
83 def readurltoken(repo):
83 def readurltoken(repo):
84 """return conduit url, token and make sure they exist
84 """return conduit url, token and make sure they exist
85
85
86 Currently read from [phabricator] config section. In the future, it might
86 Currently read from [phabricator] config section. In the future, it might
87 make sense to read from .arcconfig and .arcrc as well.
87 make sense to read from .arcconfig and .arcrc as well.
88 """
88 """
89 values = []
89 values = []
90 section = 'phabricator'
90 section = 'phabricator'
91 for name in ['url', 'token']:
91 for name in ['url', 'token']:
92 value = repo.ui.config(section, name)
92 value = repo.ui.config(section, name)
93 if not value:
93 if not value:
94 raise error.Abort(_('config %s.%s is required') % (section, name))
94 raise error.Abort(_('config %s.%s is required') % (section, name))
95 values.append(value)
95 values.append(value)
96 return values
96 return values
97
97
98 def callconduit(repo, name, params):
98 def callconduit(repo, name, params):
99 """call Conduit API, params is a dict. return json.loads result, or None"""
99 """call Conduit API, params is a dict. return json.loads result, or None"""
100 host, token = readurltoken(repo)
100 host, token = readurltoken(repo)
101 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
101 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
102 urlopener = urlmod.opener(repo.ui, authinfo)
102 urlopener = urlmod.opener(repo.ui, authinfo)
103 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
103 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
104 params = params.copy()
104 params = params.copy()
105 params['api.token'] = token
105 params['api.token'] = token
106 request = util.urlreq.request(url, data=urlencodenested(params))
106 request = util.urlreq.request(url, data=urlencodenested(params))
107 body = urlopener.open(request).read()
107 body = urlopener.open(request).read()
108 repo.ui.debug('Conduit Response: %s\n' % body)
108 repo.ui.debug('Conduit Response: %s\n' % body)
109 parsed = json.loads(body)
109 parsed = json.loads(body)
110 if parsed.get(r'error_code'):
110 if parsed.get(r'error_code'):
111 msg = (_('Conduit Error (%s): %s')
111 msg = (_('Conduit Error (%s): %s')
112 % (parsed[r'error_code'], parsed[r'error_info']))
112 % (parsed[r'error_code'], parsed[r'error_info']))
113 raise error.Abort(msg)
113 raise error.Abort(msg)
114 return parsed[r'result']
114 return parsed[r'result']
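
# Sketch of how this helper is used elsewhere in this file, for example
# fetching Differential Revision D123; the method name and parameters follow
# the Conduit API:
#
#   drevs = callconduit(repo, 'differential.query', {'ids': [123]})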
115
115
116 @command('debugcallconduit', [], _('METHOD'))
116 @command('debugcallconduit', [], _('METHOD'))
117 def debugcallconduit(ui, repo, name):
117 def debugcallconduit(ui, repo, name):
118 """call Conduit API
118 """call Conduit API
119
119
120 Call parameters are read from stdin as a JSON blob. Result will be written
120 Call parameters are read from stdin as a JSON blob. Result will be written
121 to stdout as a JSON blob.
121 to stdout as a JSON blob.
122 """
122 """
123 params = json.loads(ui.fin.read())
123 params = json.loads(ui.fin.read())
124 result = callconduit(repo, name, params)
124 result = callconduit(repo, name, params)
125 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
125 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
126 ui.write('%s\n' % s)
126 ui.write('%s\n' % s)
127
127
128 def getrepophid(repo):
128 def getrepophid(repo):
129 """given callsign, return repository PHID or None"""
129 """given callsign, return repository PHID or None"""
130 # developer config: phabricator.repophid
130 # developer config: phabricator.repophid
131 repophid = repo.ui.config('phabricator', 'repophid')
131 repophid = repo.ui.config('phabricator', 'repophid')
132 if repophid:
132 if repophid:
133 return repophid
133 return repophid
134 callsign = repo.ui.config('phabricator', 'callsign')
134 callsign = repo.ui.config('phabricator', 'callsign')
135 if not callsign:
135 if not callsign:
136 return None
136 return None
137 query = callconduit(repo, 'diffusion.repository.search',
137 query = callconduit(repo, 'diffusion.repository.search',
138 {'constraints': {'callsigns': [callsign]}})
138 {'constraints': {'callsigns': [callsign]}})
139 if len(query[r'data']) == 0:
139 if len(query[r'data']) == 0:
140 return None
140 return None
141 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
141 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
142 repo.ui.setconfig('phabricator', 'repophid', repophid)
142 repo.ui.setconfig('phabricator', 'repophid', repophid)
143 return repophid
143 return repophid
144
144
145 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
145 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
146 _differentialrevisiondescre = re.compile(
146 _differentialrevisiondescre = re.compile(
147 '^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$', re.M)
147 '^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$', re.M)
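
# Sketch of what these patterns match:
#
#   _differentialrevisiontagre.match('D123').group(1)   # -> '123'
#   _differentialrevisiondescre.search(
#       'Differential Revision: https://phab.example.com/D123').group(1)
#   # -> '123'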
148
148
149 def getoldnodedrevmap(repo, nodelist):
149 def getoldnodedrevmap(repo, nodelist):
150 """find previous nodes that has been sent to Phabricator
150 """find previous nodes that has been sent to Phabricator
151
151
152 return {node: (oldnode, Differential diff, Differential Revision ID)}
152 return {node: (oldnode, Differential diff, Differential Revision ID)}
153 for node in nodelist with known previous sent versions, or associated
153 for node in nodelist with known previous sent versions, or associated
154 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
154 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
155 be ``None``.
155 be ``None``.
156
156
157 Examines commit messages like "Differential Revision:" to get the
157 Examines commit messages like "Differential Revision:" to get the
158 association information.
158 association information.
159
159
160 If no such commit message line is found, examines all precursors and their
160 If no such commit message line is found, examines all precursors and their
161 tags. Tags with a format like "D1234" are considered a match, and the node
161 tags. Tags with a format like "D1234" are considered a match, and the node
162 with that tag and the number after "D" (e.g. 1234) will be returned.
162 with that tag and the number after "D" (e.g. 1234) will be returned.
163
163
164 The ``old node``, if not None, is guaranteed to be the last diff of the
164 The ``old node``, if not None, is guaranteed to be the last diff of the
165 corresponding Differential Revision, and to exist in the repo.
165 corresponding Differential Revision, and to exist in the repo.
166 """
166 """
167 url, token = readurltoken(repo)
167 url, token = readurltoken(repo)
168 unfi = repo.unfiltered()
168 unfi = repo.unfiltered()
169 nodemap = unfi.changelog.nodemap
169 nodemap = unfi.changelog.nodemap
170
170
171 result = {} # {node: (oldnode?, lastdiff?, drev)}
171 result = {} # {node: (oldnode?, lastdiff?, drev)}
172 toconfirm = {} # {node: (force, {precnode}, drev)}
172 toconfirm = {} # {node: (force, {precnode}, drev)}
173 for node in nodelist:
173 for node in nodelist:
174 ctx = unfi[node]
174 ctx = unfi[node]
175 # For tags like "D123", put them into "toconfirm" to verify later
175 # For tags like "D123", put them into "toconfirm" to verify later
176 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
176 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
177 for n in precnodes:
177 for n in precnodes:
178 if n in nodemap:
178 if n in nodemap:
179 for tag in unfi.nodetags(n):
179 for tag in unfi.nodetags(n):
180 m = _differentialrevisiontagre.match(tag)
180 m = _differentialrevisiontagre.match(tag)
181 if m:
181 if m:
182 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
182 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
183 continue
183 continue
184
184
185 # Check commit message
185 # Check commit message
186 m = _differentialrevisiondescre.search(ctx.description())
186 m = _differentialrevisiondescre.search(ctx.description())
187 if m:
187 if m:
188 toconfirm[node] = (1, set(precnodes), int(m.group(1)))
188 toconfirm[node] = (1, set(precnodes), int(m.group(1)))
189
189
190 # Double-check that tags are genuine by collecting all old nodes from
190 # Double-check that tags are genuine by collecting all old nodes from
191 # Phabricator, and expecting the precursors to overlap with them.
191 # Phabricator, and expecting the precursors to overlap with them.
192 if toconfirm:
192 if toconfirm:
193 drevs = [drev for force, precs, drev in toconfirm.values()]
193 drevs = [drev for force, precs, drev in toconfirm.values()]
194 alldiffs = callconduit(unfi, 'differential.querydiffs',
194 alldiffs = callconduit(unfi, 'differential.querydiffs',
195 {'revisionIDs': drevs})
195 {'revisionIDs': drevs})
196 getnode = lambda d: bin(encoding.unitolocal(
196 getnode = lambda d: bin(encoding.unitolocal(
197 getdiffmeta(d).get(r'node', ''))) or None
197 getdiffmeta(d).get(r'node', ''))) or None
198 for newnode, (force, precset, drev) in toconfirm.items():
198 for newnode, (force, precset, drev) in toconfirm.items():
199 diffs = [d for d in alldiffs.values()
199 diffs = [d for d in alldiffs.values()
200 if int(d[r'revisionID']) == drev]
200 if int(d[r'revisionID']) == drev]
201
201
202 # "precursors" as known by Phabricator
202 # "precursors" as known by Phabricator
203 phprecset = set(getnode(d) for d in diffs)
203 phprecset = set(getnode(d) for d in diffs)
204
204
205 # Ignore if precursors (Phabricator and local repo) do not overlap,
205 # Ignore if precursors (Phabricator and local repo) do not overlap,
206 # and force is not set (when commit message says nothing)
206 # and force is not set (when commit message says nothing)
207 if not force and not bool(phprecset & precset):
207 if not force and not bool(phprecset & precset):
208 tagname = 'D%d' % drev
208 tagname = 'D%d' % drev
209 tags.tag(repo, tagname, nullid, message=None, user=None,
209 tags.tag(repo, tagname, nullid, message=None, user=None,
210 date=None, local=True)
210 date=None, local=True)
211 unfi.ui.warn(_('D%s: local tag removed - does not match '
211 unfi.ui.warn(_('D%s: local tag removed - does not match '
212 'Differential history\n') % drev)
212 'Differential history\n') % drev)
213 continue
213 continue
214
214
215 # Find the last node using Phabricator metadata, and make sure it
215 # Find the last node using Phabricator metadata, and make sure it
216 # exists in the repo
216 # exists in the repo
217 oldnode = lastdiff = None
217 oldnode = lastdiff = None
218 if diffs:
218 if diffs:
219 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
219 lastdiff = max(diffs, key=lambda d: int(d[r'id']))
220 oldnode = getnode(lastdiff)
220 oldnode = getnode(lastdiff)
221 if oldnode and oldnode not in nodemap:
221 if oldnode and oldnode not in nodemap:
222 oldnode = None
222 oldnode = None
223
223
224 result[newnode] = (oldnode, lastdiff, drev)
224 result[newnode] = (oldnode, lastdiff, drev)
225
225
226 return result
226 return result
227
227
228 def getdiff(ctx, diffopts):
228 def getdiff(ctx, diffopts):
229 """plain-text diff without header (user, commit message, etc)"""
229 """plain-text diff without header (user, commit message, etc)"""
230 output = util.stringio()
230 output = util.stringio()
231 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
231 for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
232 None, opts=diffopts):
232 None, opts=diffopts):
233 output.write(chunk)
233 output.write(chunk)
234 return output.getvalue()
234 return output.getvalue()
235
235
236 def creatediff(ctx):
236 def creatediff(ctx):
237 """create a Differential Diff"""
237 """create a Differential Diff"""
238 repo = ctx.repo()
238 repo = ctx.repo()
239 repophid = getrepophid(repo)
239 repophid = getrepophid(repo)
240 # Create a "Differential Diff" via "differential.createrawdiff" API
240 # Create a "Differential Diff" via "differential.createrawdiff" API
241 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
241 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
242 if repophid:
242 if repophid:
243 params['repositoryPHID'] = repophid
243 params['repositoryPHID'] = repophid
244 diff = callconduit(repo, 'differential.createrawdiff', params)
244 diff = callconduit(repo, 'differential.createrawdiff', params)
245 if not diff:
245 if not diff:
246 raise error.Abort(_('cannot create diff for %s') % ctx)
246 raise error.Abort(_('cannot create diff for %s') % ctx)
247 return diff
247 return diff
248
248
249 def writediffproperties(ctx, diff):
249 def writediffproperties(ctx, diff):
250 """write metadata to diff so patches could be applied losslessly"""
250 """write metadata to diff so patches could be applied losslessly"""
251 params = {
251 params = {
252 'diff_id': diff[r'id'],
252 'diff_id': diff[r'id'],
253 'name': 'hg:meta',
253 'name': 'hg:meta',
254 'data': json.dumps({
254 'data': json.dumps({
255 'user': ctx.user(),
255 'user': ctx.user(),
256 'date': '%d %d' % ctx.date(),
256 'date': '%d %d' % ctx.date(),
257 'node': ctx.hex(),
257 'node': ctx.hex(),
258 'parent': ctx.p1().hex(),
258 'parent': ctx.p1().hex(),
259 }),
259 }),
260 }
260 }
261 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
261 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
262
262
263 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
263 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
264 olddiff=None, actions=None):
264 olddiff=None, actions=None):
265 """create or update a Differential Revision
265 """create or update a Differential Revision
266
266
267 If revid is None, create a new Differential Revision, otherwise update
267 If revid is None, create a new Differential Revision, otherwise update
268 revid. If parentrevid is not None, set it as a dependency.
268 revid. If parentrevid is not None, set it as a dependency.
269
269
270 If oldnode is not None, check if the patch content (without commit message
270 If oldnode is not None, check if the patch content (without commit message
271 and metadata) has changed before creating another diff.
271 and metadata) has changed before creating another diff.
272
272
273 If actions is not None, they will be appended to the transaction.
273 If actions is not None, they will be appended to the transaction.
274 """
274 """
275 repo = ctx.repo()
275 repo = ctx.repo()
276 if oldnode:
276 if oldnode:
277 diffopts = mdiff.diffopts(git=True, context=1)
277 diffopts = mdiff.diffopts(git=True, context=1)
278 oldctx = repo.unfiltered()[oldnode]
278 oldctx = repo.unfiltered()[oldnode]
279 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
279 neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
280 else:
280 else:
281 neednewdiff = True
281 neednewdiff = True
282
282
283 transactions = []
283 transactions = []
284 if neednewdiff:
284 if neednewdiff:
285 diff = creatediff(ctx)
285 diff = creatediff(ctx)
286 transactions.append({'type': 'update', 'value': diff[r'phid']})
286 transactions.append({'type': 'update', 'value': diff[r'phid']})
287 else:
287 else:
288 # Even if we don't need to upload a new diff because the patch content
288 # Even if we don't need to upload a new diff because the patch content
289 # has not changed, we might still need to update its metadata so
289 # has not changed, we might still need to update its metadata so
290 # pushers know the correct node metadata.
290 # pushers know the correct node metadata.
291 assert olddiff
291 assert olddiff
292 diff = olddiff
292 diff = olddiff
293 writediffproperties(ctx, diff)
293 writediffproperties(ctx, diff)
294
294
295 # Use a temporary summary to set the dependency. There might be better ways,
295 # Use a temporary summary to set the dependency. There might be better ways,
296 # but I cannot find them for now. Do not do that if we are updating an
296 # but I cannot find them for now. Do not do that if we are updating an
297 # existing revision (revid is not None), since that introduces visible
297 # existing revision (revid is not None), since that introduces visible
298 # churn (someone edited "Summary" twice) on the web page.
298 # churn (someone edited "Summary" twice) on the web page.
299 if parentrevid and revid is None:
299 if parentrevid and revid is None:
300 summary = 'Depends on D%s' % parentrevid
300 summary = 'Depends on D%s' % parentrevid
301 transactions += [{'type': 'summary', 'value': summary},
301 transactions += [{'type': 'summary', 'value': summary},
302 {'type': 'summary', 'value': ' '}]
302 {'type': 'summary', 'value': ' '}]
303
303
304 if actions:
304 if actions:
305 transactions += actions
305 transactions += actions
306
306
307 # Parse commit message and update related fields.
307 # Parse commit message and update related fields.
308 desc = ctx.description()
308 desc = ctx.description()
309 info = callconduit(repo, 'differential.parsecommitmessage',
309 info = callconduit(repo, 'differential.parsecommitmessage',
310 {'corpus': desc})
310 {'corpus': desc})
311 for k, v in info[r'fields'].items():
311 for k, v in info[r'fields'].items():
312 if k in ['title', 'summary', 'testPlan']:
312 if k in ['title', 'summary', 'testPlan']:
313 transactions.append({'type': k, 'value': v})
313 transactions.append({'type': k, 'value': v})
314
314
315 params = {'transactions': transactions}
315 params = {'transactions': transactions}
316 if revid is not None:
316 if revid is not None:
317 # Update an existing Differential Revision
317 # Update an existing Differential Revision
318 params['objectIdentifier'] = revid
318 params['objectIdentifier'] = revid
319
319
320 revision = callconduit(repo, 'differential.revision.edit', params)
320 revision = callconduit(repo, 'differential.revision.edit', params)
321 if not revision:
321 if not revision:
322 raise error.Abort(_('cannot create revision for %s') % ctx)
322 raise error.Abort(_('cannot create revision for %s') % ctx)
323
323
324 return revision, diff
324 return revision, diff
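
# For a newly created revision in a stack, the transactions passed to
# 'differential.revision.edit' look roughly like this (a sketch with
# placeholder values; reviewer actions are appended when given):
#
#   [{'type': 'update', 'value': 'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx'},
#    {'type': 'summary', 'value': 'Depends on D1'},
#    {'type': 'summary', 'value': ' '},
#    {'type': 'title', 'value': 'commit title'}]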
325
325
326 def userphids(repo, names):
326 def userphids(repo, names):
327 """convert user names to PHIDs"""
327 """convert user names to PHIDs"""
328 query = {'constraints': {'usernames': names}}
328 query = {'constraints': {'usernames': names}}
329 result = callconduit(repo, 'user.search', query)
329 result = callconduit(repo, 'user.search', query)
330 # A username that is not found is not an API error, so check whether we
330 # A username that is not found is not an API error, so check whether we
331 # missed any names here.
331 # missed any names here.
332 data = result[r'data']
332 data = result[r'data']
333 resolved = set(entry[r'fields'][r'username'] for entry in data)
333 resolved = set(entry[r'fields'][r'username'] for entry in data)
334 unresolved = set(names) - resolved
334 unresolved = set(names) - resolved
335 if unresolved:
335 if unresolved:
336 raise error.Abort(_('unknown username: %s')
336 raise error.Abort(_('unknown username: %s')
337 % ' '.join(sorted(unresolved)))
337 % ' '.join(sorted(unresolved)))
338 return [entry[r'phid'] for entry in data]
338 return [entry[r'phid'] for entry in data]
339
339
340 @command('phabsend',
340 @command('phabsend',
341 [('r', 'rev', [], _('revisions to send'), _('REV')),
341 [('r', 'rev', [], _('revisions to send'), _('REV')),
342 - ('', 'amend', False, _('update commit messages')),
342 + ('', 'amend', True, _('update commit messages')),
343 ('', 'reviewer', [], _('specify reviewers')),
343 ('', 'reviewer', [], _('specify reviewers')),
344 ('', 'confirm', None, _('ask for confirmation before sending'))],
344 ('', 'confirm', None, _('ask for confirmation before sending'))],
345 _('REV [OPTIONS]'))
345 _('REV [OPTIONS]'))
346 def phabsend(ui, repo, *revs, **opts):
346 def phabsend(ui, repo, *revs, **opts):
347 """upload changesets to Phabricator
347 """upload changesets to Phabricator
348
348
349 If multiple revisions are specified, they will be sent as a stack with
349 If multiple revisions are specified, they will be sent as a stack with
350 linear dependency relationships, using the order specified by the
350 linear dependency relationships, using the order specified by the
351 revset.
351 revset.
352
352
353 The first time changesets are uploaded, local tags will be created to
353 The first time changesets are uploaded, local tags will be created to
354 maintain the association. After that, phabsend will check the
354 maintain the association. After that, phabsend will check the
355 obsstore and tag information so it can figure out whether to update an
355 obsstore and tag information so it can figure out whether to update an
356 existing Differential Revision, or create a new one.
356 existing Differential Revision, or create a new one.
357
357
358 If --amend is set, update commit messages so they have the
358 If --amend is set, update commit messages so they have the
359 ``Differential Revision`` URL and remove the related tags. This is similar
359 ``Differential Revision`` URL and remove the related tags. This is similar
360 to what arcanist does, and is preferable in author-push workflows. Otherwise,
360 to what arcanist does, and is preferable in author-push workflows. Otherwise,
361 use local tags to record the ``Differential Revision`` association.
361 use local tags to record the ``Differential Revision`` association.
362
362
363 The --confirm option lets you confirm changesets before sending them. You
363 The --confirm option lets you confirm changesets before sending them. You
364 can also add the following to your configuration file to make it the
364 can also add the following to your configuration file to make it the
365 default behaviour::
365 default behaviour::
366
366
367 [phabsend]
367 [phabsend]
368 confirm = true
368 confirm = true
369
369
370 phabsend will check obsstore and the above association to decide whether to
370 phabsend will check obsstore and the above association to decide whether to
371 update an existing Differential Revision, or create a new one.
371 update an existing Differential Revision, or create a new one.
372 """
372 """
373 revs = list(revs) + opts.get('rev', [])
373 revs = list(revs) + opts.get('rev', [])
374 revs = scmutil.revrange(repo, revs)
374 revs = scmutil.revrange(repo, revs)
375
375
376 if not revs:
376 if not revs:
377 raise error.Abort(_('phabsend requires at least one changeset'))
377 raise error.Abort(_('phabsend requires at least one changeset'))
378 if opts.get('amend'):
378 if opts.get('amend'):
379 cmdutil.checkunfinished(repo)
379 cmdutil.checkunfinished(repo)
380
380
381 confirm = ui.configbool('phabsend', 'confirm')
381 confirm = ui.configbool('phabsend', 'confirm')
382 confirm |= bool(opts.get('confirm'))
382 confirm |= bool(opts.get('confirm'))
383 if confirm:
383 if confirm:
384 confirmed = _confirmbeforesend(repo, revs)
384 confirmed = _confirmbeforesend(repo, revs)
385 if not confirmed:
385 if not confirmed:
386 raise error.Abort(_('phabsend cancelled'))
386 raise error.Abort(_('phabsend cancelled'))
387
387
388 actions = []
388 actions = []
389 reviewers = opts.get('reviewer', [])
389 reviewers = opts.get('reviewer', [])
390 if reviewers:
390 if reviewers:
391 phids = userphids(repo, reviewers)
391 phids = userphids(repo, reviewers)
392 actions.append({'type': 'reviewers.add', 'value': phids})
392 actions.append({'type': 'reviewers.add', 'value': phids})
393
393
394 # {newnode: (oldnode, olddiff, olddrev}
394 # {newnode: (oldnode, olddiff, olddrev}
395 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
395 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
396
396
397 drevids = [] # [int]
397 drevids = [] # [int]
398 diffmap = {} # {newnode: diff}
398 diffmap = {} # {newnode: diff}
399
399
400 # Send patches one by one so we know their Differential Revision IDs and
400 # Send patches one by one so we know their Differential Revision IDs and
401 # can provide dependency relationship
401 # can provide dependency relationship
402 lastrevid = None
402 lastrevid = None
403 for rev in revs:
403 for rev in revs:
404 ui.debug('sending rev %d\n' % rev)
404 ui.debug('sending rev %d\n' % rev)
405 ctx = repo[rev]
405 ctx = repo[rev]
406
406
407 # Get Differential Revision ID
407 # Get Differential Revision ID
408 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
408 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
409 if oldnode != ctx.node() or opts.get('amend'):
409 if oldnode != ctx.node() or opts.get('amend'):
410 # Create or update Differential Revision
410 # Create or update Differential Revision
411 revision, diff = createdifferentialrevision(
411 revision, diff = createdifferentialrevision(
412 ctx, revid, lastrevid, oldnode, olddiff, actions)
412 ctx, revid, lastrevid, oldnode, olddiff, actions)
413 diffmap[ctx.node()] = diff
413 diffmap[ctx.node()] = diff
414 newrevid = int(revision[r'object'][r'id'])
414 newrevid = int(revision[r'object'][r'id'])
415 if revid:
415 if revid:
416 action = _('updated')
416 action = _('updated')
417 else:
417 else:
418 action = _('created')
418 action = _('created')
419
419
420 # Create a local tag to note the association, if the commit message
420 # Create a local tag to note the association, if the commit message
421 # does not have it already
421 # does not have it already
422 m = _differentialrevisiondescre.search(ctx.description())
422 m = _differentialrevisiondescre.search(ctx.description())
423 if not m or int(m.group(1)) != newrevid:
423 if not m or int(m.group(1)) != newrevid:
424 tagname = 'D%d' % newrevid
424 tagname = 'D%d' % newrevid
425 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
425 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
426 date=None, local=True)
426 date=None, local=True)
427 else:
427 else:
428 # Nothing changed. But still set "newrevid" so the next revision
428 # Nothing changed. But still set "newrevid" so the next revision
429 # could depend on this one.
429 # could depend on this one.
430 newrevid = revid
430 newrevid = revid
431 action = _('skipped')
431 action = _('skipped')
432
432
433 ui.write(_('D%s: %s - %s: %s\n') % (newrevid, action, ctx,
433 ui.write(_('D%s: %s - %s: %s\n') % (newrevid, action, ctx,
434 ctx.description().split('\n')[0]))
434 ctx.description().split('\n')[0]))
435 drevids.append(newrevid)
435 drevids.append(newrevid)
436 lastrevid = newrevid
436 lastrevid = newrevid
437
437
438 # Update commit messages and remove tags
438 # Update commit messages and remove tags
439 if opts.get('amend'):
439 if opts.get('amend'):
440 unfi = repo.unfiltered()
440 unfi = repo.unfiltered()
441 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
441 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
442 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
442 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
443 wnode = unfi['.'].node()
443 wnode = unfi['.'].node()
444 mapping = {} # {oldnode: [newnode]}
444 mapping = {} # {oldnode: [newnode]}
445 for i, rev in enumerate(revs):
445 for i, rev in enumerate(revs):
446 old = unfi[rev]
446 old = unfi[rev]
447 drevid = drevids[i]
447 drevid = drevids[i]
448 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
448 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
449 newdesc = getdescfromdrev(drev)
449 newdesc = getdescfromdrev(drev)
450 # Make sure the commit message contains "Differential Revision"
450 # Make sure the commit message contains "Differential Revision"
451 if old.description() != newdesc:
451 if old.description() != newdesc:
452 parents = [
452 parents = [
453 mapping.get(old.p1().node(), (old.p1(),))[0],
453 mapping.get(old.p1().node(), (old.p1(),))[0],
454 mapping.get(old.p2().node(), (old.p2(),))[0],
454 mapping.get(old.p2().node(), (old.p2(),))[0],
455 ]
455 ]
456 new = context.metadataonlyctx(
456 new = context.metadataonlyctx(
457 repo, old, parents=parents, text=newdesc,
457 repo, old, parents=parents, text=newdesc,
458 user=old.user(), date=old.date(), extra=old.extra())
458 user=old.user(), date=old.date(), extra=old.extra())
459 newnode = new.commit()
459 newnode = new.commit()
460 mapping[old.node()] = [newnode]
460 mapping[old.node()] = [newnode]
461 # Update diff property
461 # Update diff property
462 writediffproperties(unfi[newnode], diffmap[old.node()])
462 writediffproperties(unfi[newnode], diffmap[old.node()])
463 # Remove local tags since they are no longer necessary
463 # Remove local tags since they are no longer necessary
464 tagname = 'D%d' % drevid
464 tagname = 'D%d' % drevid
465 if tagname in repo.tags():
465 if tagname in repo.tags():
466 tags.tag(repo, tagname, nullid, message=None, user=None,
466 tags.tag(repo, tagname, nullid, message=None, user=None,
467 date=None, local=True)
467 date=None, local=True)
468 scmutil.cleanupnodes(repo, mapping, 'phabsend')
468 scmutil.cleanupnodes(repo, mapping, 'phabsend')
469 if wnode in mapping:
469 if wnode in mapping:
470 unfi.setparents(mapping[wnode][0])
470 unfi.setparents(mapping[wnode][0])
471
471
472 # Map from "hg:meta" keys to header understood by "hg import". The order is
472 # Map from "hg:meta" keys to header understood by "hg import". The order is
473 # consistent with "hg export" output.
473 # consistent with "hg export" output.
474 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
474 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
475 (r'node', 'Node ID'), (r'parent', 'Parent ')])
475 (r'node', 'Node ID'), (r'parent', 'Parent ')])
476
476
477 def _confirmbeforesend(repo, revs):
477 def _confirmbeforesend(repo, revs):
478 ui = repo.ui
478 ui = repo.ui
479 for rev in revs:
479 for rev in revs:
480 ctx = repo[rev]
480 ctx = repo[rev]
481 desc = ctx.description().splitlines()[0]
481 desc = ctx.description().splitlines()[0]
482 ui.write(('%d: ' % rev), label='phabsend.revnumber')
482 ui.write(('%d: ' % rev), label='phabsend.revnumber')
483 ui.write(('%s\n' % desc), label='phabsend.desc')
483 ui.write(('%s\n' % desc), label='phabsend.desc')
484
484
485 if ui.promptchoice(_('Phabsend the above changes (yn)?'
485 if ui.promptchoice(_('Phabsend the above changes (yn)?'
486 '$$ &Yes $$ &No')):
486 '$$ &Yes $$ &No')):
487 return False
487 return False
488
488
489 return True
489 return True
490
490
491 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
491 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
492 'abandoned'}
492 'abandoned'}
493
493
494 def _getstatusname(drev):
494 def _getstatusname(drev):
495 """get normalized status name from a Differential Revision"""
495 """get normalized status name from a Differential Revision"""
496 return drev[r'statusName'].replace(' ', '').lower()
496 return drev[r'statusName'].replace(' ', '').lower()
497
497
498 # Small language to specify differential revisions. Support symbols: (), :X,
498 # Small language to specify differential revisions. Support symbols: (), :X,
499 # +, and -.
499 # +, and -.
500
500
501 _elements = {
501 _elements = {
502 # token-type: binding-strength, primary, prefix, infix, suffix
502 # token-type: binding-strength, primary, prefix, infix, suffix
503 '(': (12, None, ('group', 1, ')'), None, None),
503 '(': (12, None, ('group', 1, ')'), None, None),
504 ':': (8, None, ('ancestors', 8), None, None),
504 ':': (8, None, ('ancestors', 8), None, None),
505 '&': (5, None, None, ('and_', 5), None),
505 '&': (5, None, None, ('and_', 5), None),
506 '+': (4, None, None, ('add', 4), None),
506 '+': (4, None, None, ('add', 4), None),
507 '-': (4, None, None, ('sub', 4), None),
507 '-': (4, None, None, ('sub', 4), None),
508 ')': (0, None, None, None, None),
508 ')': (0, None, None, None, None),
509 'symbol': (0, 'symbol', None, None, None),
509 'symbol': (0, 'symbol', None, None, None),
510 'end': (0, None, None, None, None),
510 'end': (0, None, None, None, None),
511 }
511 }
512
512
513 def _tokenize(text):
513 def _tokenize(text):
514 view = memoryview(text) # zero-copy slice
514 view = memoryview(text) # zero-copy slice
515 special = '():+-& '
515 special = '():+-& '
516 pos = 0
516 pos = 0
517 length = len(text)
517 length = len(text)
518 while pos < length:
518 while pos < length:
519 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
519 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
520 view[pos:]))
520 view[pos:]))
521 if symbol:
521 if symbol:
522 yield ('symbol', symbol, pos)
522 yield ('symbol', symbol, pos)
523 pos += len(symbol)
523 pos += len(symbol)
524 else: # special char, ignore space
524 else: # special char, ignore space
525 if text[pos] != ' ':
525 if text[pos] != ' ':
526 yield (text[pos], None, pos)
526 yield (text[pos], None, pos)
527 pos += 1
527 pos += 1
528 yield ('end', None, pos)
528 yield ('end', None, pos)
529
529
530 def _parse(text):
530 def _parse(text):
531 tree, pos = parser.parser(_elements).parse(_tokenize(text))
531 tree, pos = parser.parser(_elements).parse(_tokenize(text))
532 if pos != len(text):
532 if pos != len(text):
533 raise error.ParseError('invalid token', pos)
533 raise error.ParseError('invalid token', pos)
534 return tree
534 return tree
535
535
536 def _parsedrev(symbol):
536 def _parsedrev(symbol):
537 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
537 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
538 if symbol.startswith('D') and symbol[1:].isdigit():
538 if symbol.startswith('D') and symbol[1:].isdigit():
539 return int(symbol[1:])
539 return int(symbol[1:])
540 if symbol.isdigit():
540 if symbol.isdigit():
541 return int(symbol)
541 return int(symbol)
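
# Sketches of how a spec is interpreted:
#
#   _parsedrev('D45')  # -> 45
#   _parsedrev('12')   # -> 12
#   _parsedrev('x')    # -> None
#   _parse('D4+D5')    # -> ('add', ('symbol', 'D4'), ('symbol', 'D5'))
#                      #    (assumed shape of the parse tree)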
542
542
543 def _prefetchdrevs(tree):
543 def _prefetchdrevs(tree):
544 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
544 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
545 drevs = set()
545 drevs = set()
546 ancestordrevs = set()
546 ancestordrevs = set()
547 op = tree[0]
547 op = tree[0]
548 if op == 'symbol':
548 if op == 'symbol':
549 r = _parsedrev(tree[1])
549 r = _parsedrev(tree[1])
550 if r:
550 if r:
551 drevs.add(r)
551 drevs.add(r)
552 elif op == 'ancestors':
552 elif op == 'ancestors':
553 r, a = _prefetchdrevs(tree[1])
553 r, a = _prefetchdrevs(tree[1])
554 drevs.update(r)
554 drevs.update(r)
555 ancestordrevs.update(r)
555 ancestordrevs.update(r)
556 ancestordrevs.update(a)
556 ancestordrevs.update(a)
557 else:
557 else:
558 for t in tree[1:]:
558 for t in tree[1:]:
559 r, a = _prefetchdrevs(t)
559 r, a = _prefetchdrevs(t)
560 drevs.update(r)
560 drevs.update(r)
561 ancestordrevs.update(a)
561 ancestordrevs.update(a)
562 return drevs, ancestordrevs
562 return drevs, ancestordrevs
563
563
564 def querydrev(repo, spec):
564 def querydrev(repo, spec):
565 """return a list of "Differential Revision" dicts
565 """return a list of "Differential Revision" dicts
566
566
567 spec is a string using a simple query language, see docstring in phabread
567 spec is a string using a simple query language, see docstring in phabread
568 for details.
568 for details.
569
569
570 A "Differential Revision dict" looks like:
570 A "Differential Revision dict" looks like:
571
571
572 {
572 {
573 "id": "2",
573 "id": "2",
574 "phid": "PHID-DREV-672qvysjcczopag46qty",
574 "phid": "PHID-DREV-672qvysjcczopag46qty",
575 "title": "example",
575 "title": "example",
576 "uri": "https://phab.example.com/D2",
576 "uri": "https://phab.example.com/D2",
577 "dateCreated": "1499181406",
577 "dateCreated": "1499181406",
578 "dateModified": "1499182103",
578 "dateModified": "1499182103",
579 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
579 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
580 "status": "0",
580 "status": "0",
581 "statusName": "Needs Review",
581 "statusName": "Needs Review",
582 "properties": [],
582 "properties": [],
583 "branch": null,
583 "branch": null,
584 "summary": "",
584 "summary": "",
585 "testPlan": "",
585 "testPlan": "",
586 "lineCount": "2",
586 "lineCount": "2",
587 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
587 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
588 "diffs": [
588 "diffs": [
589 "3",
589 "3",
590 "4",
590 "4",
591 ],
591 ],
592 "commits": [],
592 "commits": [],
593 "reviewers": [],
593 "reviewers": [],
594 "ccs": [],
594 "ccs": [],
595 "hashes": [],
595 "hashes": [],
596 "auxiliary": {
596 "auxiliary": {
597 "phabricator:projects": [],
597 "phabricator:projects": [],
598 "phabricator:depends-on": [
598 "phabricator:depends-on": [
599 "PHID-DREV-gbapp366kutjebt7agcd"
599 "PHID-DREV-gbapp366kutjebt7agcd"
600 ]
600 ]
601 },
601 },
602 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
602 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
603 "sourcePath": null
603 "sourcePath": null
604 }
604 }
605 """
605 """
606 def fetch(params):
606 def fetch(params):
607 """params -> single drev or None"""
607 """params -> single drev or None"""
608 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
608 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
609 if key in prefetched:
609 if key in prefetched:
610 return prefetched[key]
610 return prefetched[key]
611 drevs = callconduit(repo, 'differential.query', params)
611 drevs = callconduit(repo, 'differential.query', params)
612 # Fill prefetched with the result
612 # Fill prefetched with the result
613 for drev in drevs:
613 for drev in drevs:
614 prefetched[drev[r'phid']] = drev
614 prefetched[drev[r'phid']] = drev
615 prefetched[int(drev[r'id'])] = drev
615 prefetched[int(drev[r'id'])] = drev
616 if key not in prefetched:
616 if key not in prefetched:
617 raise error.Abort(_('cannot get Differential Revision %r') % params)
617 raise error.Abort(_('cannot get Differential Revision %r') % params)
618 return prefetched[key]
618 return prefetched[key]
619
619
620 def getstack(topdrevids):
620 def getstack(topdrevids):
621 """given a top, get a stack from the bottom, [id] -> [id]"""
621 """given a top, get a stack from the bottom, [id] -> [id]"""
622 visited = set()
622 visited = set()
623 result = []
623 result = []
624 queue = [{r'ids': [i]} for i in topdrevids]
624 queue = [{r'ids': [i]} for i in topdrevids]
625 while queue:
625 while queue:
626 params = queue.pop()
626 params = queue.pop()
627 drev = fetch(params)
627 drev = fetch(params)
628 if drev[r'id'] in visited:
628 if drev[r'id'] in visited:
629 continue
629 continue
630 visited.add(drev[r'id'])
630 visited.add(drev[r'id'])
631 result.append(int(drev[r'id']))
631 result.append(int(drev[r'id']))
632 auxiliary = drev.get(r'auxiliary', {})
632 auxiliary = drev.get(r'auxiliary', {})
633 depends = auxiliary.get(r'phabricator:depends-on', [])
633 depends = auxiliary.get(r'phabricator:depends-on', [])
634 for phid in depends:
634 for phid in depends:
635 queue.append({'phids': [phid]})
635 queue.append({'phids': [phid]})
636 result.reverse()
636 result.reverse()
637 return smartset.baseset(result)
637 return smartset.baseset(result)
638
638
639 # Initialize prefetch cache
639 # Initialize prefetch cache
640 prefetched = {} # {id or phid: drev}
640 prefetched = {} # {id or phid: drev}
641
641
642 tree = _parse(spec)
642 tree = _parse(spec)
643 drevs, ancestordrevs = _prefetchdrevs(tree)
643 drevs, ancestordrevs = _prefetchdrevs(tree)
644
644
645 # developer config: phabricator.batchsize
645 # developer config: phabricator.batchsize
646 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
646 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
647
647
648 # Prefetch Differential Revisions in batch
648 # Prefetch Differential Revisions in batch
649 tofetch = set(drevs)
649 tofetch = set(drevs)
650 for r in ancestordrevs:
650 for r in ancestordrevs:
651 tofetch.update(range(max(1, r - batchsize), r + 1))
651 tofetch.update(range(max(1, r - batchsize), r + 1))
652 if drevs:
652 if drevs:
653 fetch({r'ids': list(tofetch)})
653 fetch({r'ids': list(tofetch)})
654 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
654 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
655
655
656 # Walk through the tree, return smartsets
656 # Walk through the tree, return smartsets
657 def walk(tree):
657 def walk(tree):
658 op = tree[0]
658 op = tree[0]
659 if op == 'symbol':
659 if op == 'symbol':
660 drev = _parsedrev(tree[1])
660 drev = _parsedrev(tree[1])
661 if drev:
661 if drev:
662 return smartset.baseset([drev])
662 return smartset.baseset([drev])
663 elif tree[1] in _knownstatusnames:
663 elif tree[1] in _knownstatusnames:
664 drevs = [r for r in validids
664 drevs = [r for r in validids
665 if _getstatusname(prefetched[r]) == tree[1]]
665 if _getstatusname(prefetched[r]) == tree[1]]
666 return smartset.baseset(drevs)
666 return smartset.baseset(drevs)
667 else:
667 else:
668 raise error.Abort(_('unknown symbol: %s') % tree[1])
668 raise error.Abort(_('unknown symbol: %s') % tree[1])
669 elif op in {'and_', 'add', 'sub'}:
669 elif op in {'and_', 'add', 'sub'}:
670 assert len(tree) == 3
670 assert len(tree) == 3
671 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
671 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
672 elif op == 'group':
672 elif op == 'group':
673 return walk(tree[1])
673 return walk(tree[1])
674 elif op == 'ancestors':
674 elif op == 'ancestors':
675 return getstack(walk(tree[1]))
675 return getstack(walk(tree[1]))
676 else:
676 else:
677 raise error.ProgrammingError('illegal tree: %r' % tree)
677 raise error.ProgrammingError('illegal tree: %r' % tree)
678
678
679 return [prefetched[r] for r in walk(tree)]
679 return [prefetched[r] for r in walk(tree)]
680
680
681 def getdescfromdrev(drev):
681 def getdescfromdrev(drev):
682 """get description (commit message) from "Differential Revision"
682 """get description (commit message) from "Differential Revision"
683
683
684 This is similar to the differential.getcommitmessage API, but we only care
684 This is similar to the differential.getcommitmessage API, but we only care
685 about a limited set of fields: title, summary, test plan, and URL.
685 about a limited set of fields: title, summary, test plan, and URL.
686 """
686 """
687 title = drev[r'title']
687 title = drev[r'title']
688 summary = drev[r'summary'].rstrip()
688 summary = drev[r'summary'].rstrip()
689 testplan = drev[r'testPlan'].rstrip()
689 testplan = drev[r'testPlan'].rstrip()
690 if testplan:
690 if testplan:
691 testplan = 'Test Plan:\n%s' % testplan
691 testplan = 'Test Plan:\n%s' % testplan
692 uri = 'Differential Revision: %s' % drev[r'uri']
692 uri = 'Differential Revision: %s' % drev[r'uri']
693 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
693 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
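
# Sketch of a reconstructed message (placeholder field values):
#
#   getdescfromdrev(drev)
#   # -> 'a title\n\na summary\n\nTest Plan:\nsteps\n\n'
#   #    'Differential Revision: https://phab.example.com/D2'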
694
694
695 def getdiffmeta(diff):
695 def getdiffmeta(diff):
696 """get commit metadata (date, node, user, p1) from a diff object
696 """get commit metadata (date, node, user, p1) from a diff object
697
697
698 The metadata could be "hg:meta", sent by phabsend, like:
698 The metadata could be "hg:meta", sent by phabsend, like:
699
699
700 "properties": {
700 "properties": {
701 "hg:meta": {
701 "hg:meta": {
702 "date": "1499571514 25200",
702 "date": "1499571514 25200",
703 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
703 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
704 "user": "Foo Bar <foo@example.com>",
704 "user": "Foo Bar <foo@example.com>",
705 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
705 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
706 }
706 }
707 }
707 }
708
708
709 Or converted from "local:commits", sent by "arc", like:
709 Or converted from "local:commits", sent by "arc", like:
710
710
711 "properties": {
711 "properties": {
712 "local:commits": {
712 "local:commits": {
713 "98c08acae292b2faf60a279b4189beb6cff1414d": {
713 "98c08acae292b2faf60a279b4189beb6cff1414d": {
714 "author": "Foo Bar",
714 "author": "Foo Bar",
715 "time": 1499546314,
715 "time": 1499546314,
716 "branch": "default",
716 "branch": "default",
717 "tag": "",
717 "tag": "",
718 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
718 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
719 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
719 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
720 "local": "1000",
720 "local": "1000",
721 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
721 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
722 "summary": "...",
722 "summary": "...",
723 "message": "...",
723 "message": "...",
724 "authorEmail": "foo@example.com"
724 "authorEmail": "foo@example.com"
725 }
725 }
726 }
726 }
727 }
727 }
728
728
729 Note: metadata extracted from "local:commits" will lose time zone
729 Note: metadata extracted from "local:commits" will lose time zone
730 information.
730 information.
731 """
731 """
732 props = diff.get(r'properties') or {}
732 props = diff.get(r'properties') or {}
733 meta = props.get(r'hg:meta')
733 meta = props.get(r'hg:meta')
734 if not meta and props.get(r'local:commits'):
734 if not meta and props.get(r'local:commits'):
735 commit = sorted(props[r'local:commits'].values())[0]
735 commit = sorted(props[r'local:commits'].values())[0]
736 meta = {
736 meta = {
737 r'date': r'%d 0' % commit[r'time'],
737 r'date': r'%d 0' % commit[r'time'],
738 r'node': commit[r'rev'],
738 r'node': commit[r'rev'],
739 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
739 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
740 }
740 }
741 if len(commit.get(r'parents', ())) >= 1:
741 if len(commit.get(r'parents', ())) >= 1:
742 meta[r'parent'] = commit[r'parents'][0]
742 meta[r'parent'] = commit[r'parents'][0]
743 return meta or {}
743 return meta or {}
744
744
745 def readpatch(repo, drevs, write):
745 def readpatch(repo, drevs, write):
746 """generate plain-text patch readable by 'hg import'
746 """generate plain-text patch readable by 'hg import'
747
747
748 write is usually ui.write. drevs is what "querydrev" returns, results of
748 write is usually ui.write. drevs is what "querydrev" returns, results of
749 "differential.query".
749 "differential.query".
750 """
750 """
751 # Prefetch hg:meta property for all diffs
751 # Prefetch hg:meta property for all diffs
752 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
752 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
753 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
753 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
754
754
755 # Generate patch for each drev
755 # Generate patch for each drev
756 for drev in drevs:
756 for drev in drevs:
757 repo.ui.note(_('reading D%s\n') % drev[r'id'])
757 repo.ui.note(_('reading D%s\n') % drev[r'id'])
758
758
759 diffid = max(int(v) for v in drev[r'diffs'])
759 diffid = max(int(v) for v in drev[r'diffs'])
760 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
760 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
761 desc = getdescfromdrev(drev)
761 desc = getdescfromdrev(drev)
762 header = '# HG changeset patch\n'
762 header = '# HG changeset patch\n'
763
763
764 # Try to preserve metadata from hg:meta property. Write hg patch
764 # Try to preserve metadata from hg:meta property. Write hg patch
765 # headers that can be read by the "import" command. See patchheadermap
765 # headers that can be read by the "import" command. See patchheadermap
766 # and extract in mercurial/patch.py for supported headers.
766 # and extract in mercurial/patch.py for supported headers.
767 meta = getdiffmeta(diffs[str(diffid)])
767 meta = getdiffmeta(diffs[str(diffid)])
768 for k in _metanamemap.keys():
768 for k in _metanamemap.keys():
769 if k in meta:
769 if k in meta:
770 header += '# %s %s\n' % (_metanamemap[k], meta[k])
770 header += '# %s %s\n' % (_metanamemap[k], meta[k])
771
771
772 content = '%s%s\n%s' % (header, desc, body)
772 content = '%s%s\n%s' % (header, desc, body)
773 write(encoding.unitolocal(content))
773 write(encoding.unitolocal(content))
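
# The emitted header mirrors "hg export"-style output, roughly (a sketch
# using the placeholder values from the getdiffmeta docstring above):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16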
774
774
775 @command('phabread',
775 @command('phabread',
776 [('', 'stack', False, _('read dependencies'))],
776 [('', 'stack', False, _('read dependencies'))],
777 _('DREVSPEC [OPTIONS]'))
777 _('DREVSPEC [OPTIONS]'))
778 def phabread(ui, repo, spec, **opts):
778 def phabread(ui, repo, spec, **opts):
779 """print patches from Phabricator suitable for importing
779 """print patches from Phabricator suitable for importing
780
780
781 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
781 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
782 the number ``123``. It could also have common operators like ``+``, ``-``,
782 the number ``123``. It could also have common operators like ``+``, ``-``,
783 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
783 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
784 select a stack.
784 select a stack.
785
785
786 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
786 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
787 could be used to filter patches by status. For performance reasons, they
787 could be used to filter patches by status. For performance reasons, they
788 only represent a subset of non-status selections and cannot be used alone.
788 only represent a subset of non-status selections and cannot be used alone.
789
789
790 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
790 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
791 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
791 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
792 stack up to D9.
792 stack up to D9.
793
793
794 If --stack is given, follow dependency information and read all patches.
794 If --stack is given, follow dependency information and read all patches.
795 It is equivalent to the ``:`` operator.
795 It is equivalent to the ``:`` operator.
796 """
796 """
797 if opts.get('stack'):
797 if opts.get('stack'):
798 spec = ':(%s)' % spec
798 spec = ':(%s)' % spec
799 drevs = querydrev(repo, spec)
799 drevs = querydrev(repo, spec)
800 readpatch(repo, drevs, ui.write)
800 readpatch(repo, drevs, ui.write)
801
801
802 @command('phabupdate',
802 @command('phabupdate',
803 [('', 'accept', False, _('accept revisions')),
803 [('', 'accept', False, _('accept revisions')),
804 ('', 'reject', False, _('reject revisions')),
804 ('', 'reject', False, _('reject revisions')),
805 ('', 'abandon', False, _('abandon revisions')),
805 ('', 'abandon', False, _('abandon revisions')),
806 ('', 'reclaim', False, _('reclaim revisions')),
806 ('', 'reclaim', False, _('reclaim revisions')),
807 ('m', 'comment', '', _('comment on the last revision')),
807 ('m', 'comment', '', _('comment on the last revision')),
808 ], _('DREVSPEC [OPTIONS]'))
808 ], _('DREVSPEC [OPTIONS]'))
809 def phabupdate(ui, repo, spec, **opts):
809 def phabupdate(ui, repo, spec, **opts):
810 """update Differential Revision in batch
810 """update Differential Revision in batch
811
811
812 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
812 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
813 """
813 """
814 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
814 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
815 if len(flags) > 1:
815 if len(flags) > 1:
816 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
816 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
817
817
818 actions = []
818 actions = []
819 for f in flags:
819 for f in flags:
820 actions.append({'type': f, 'value': 'true'})
820 actions.append({'type': f, 'value': 'true'})
821
821
822 drevs = querydrev(repo, spec)
822 drevs = querydrev(repo, spec)
823 for i, drev in enumerate(drevs):
823 for i, drev in enumerate(drevs):
824 if i + 1 == len(drevs) and opts.get('comment'):
824 if i + 1 == len(drevs) and opts.get('comment'):
825 actions.append({'type': 'comment', 'value': opts['comment']})
825 actions.append({'type': 'comment', 'value': opts['comment']})
826 if actions:
826 if actions:
827 params = {'objectIdentifier': drev[r'phid'],
827 params = {'objectIdentifier': drev[r'phid'],
828 'transactions': actions}
828 'transactions': actions}
829 callconduit(repo, 'differential.revision.edit', params)
829 callconduit(repo, 'differential.revision.edit', params)