phabricator: add the phabdiff data structure...
Ian Moody
r43455:75e7628b default
@@ -1,1333 +1,1359 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
14 By default, Phabricator requires a ``Test Plan``, which might prevent some
15 changesets from being sent. The requirement can be disabled by changing the
15 changesets from being sent. The requirement can be disabled by changing the
16 ``differential.require-test-plan-field`` config on the server side.
16 ``differential.require-test-plan-field`` config on the server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial.pycompat import getattr
52 from mercurial.pycompat import getattr
53 from mercurial.thirdparty import attr
53 from mercurial.thirdparty import attr
54 from mercurial import (
54 from mercurial import (
55 cmdutil,
55 cmdutil,
56 context,
56 context,
57 encoding,
57 encoding,
58 error,
58 error,
59 exthelper,
59 exthelper,
60 httpconnection as httpconnectionmod,
60 httpconnection as httpconnectionmod,
61 mdiff,
61 mdiff,
62 obsutil,
62 obsutil,
63 parser,
63 parser,
64 patch,
64 patch,
65 phases,
65 phases,
66 pycompat,
66 pycompat,
67 scmutil,
67 scmutil,
68 smartset,
68 smartset,
69 tags,
69 tags,
70 templatefilters,
70 templatefilters,
71 templateutil,
71 templateutil,
72 url as urlmod,
72 url as urlmod,
73 util,
73 util,
74 )
74 )
75 from mercurial.utils import (
75 from mercurial.utils import (
76 procutil,
76 procutil,
77 stringutil,
77 stringutil,
78 )
78 )
79
79
80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 # be specifying the version(s) of Mercurial they are tested with, or
82 # be specifying the version(s) of Mercurial they are tested with, or
83 # leave the attribute unspecified.
83 # leave the attribute unspecified.
84 testedwith = b'ships-with-hg-core'
84 testedwith = b'ships-with-hg-core'
85
85
86 eh = exthelper.exthelper()
86 eh = exthelper.exthelper()
87
87
88 cmdtable = eh.cmdtable
88 cmdtable = eh.cmdtable
89 command = eh.command
89 command = eh.command
90 configtable = eh.configtable
90 configtable = eh.configtable
91 templatekeyword = eh.templatekeyword
91 templatekeyword = eh.templatekeyword
92
92
93 # developer config: phabricator.batchsize
93 # developer config: phabricator.batchsize
94 eh.configitem(
94 eh.configitem(
95 b'phabricator', b'batchsize', default=12,
95 b'phabricator', b'batchsize', default=12,
96 )
96 )
97 eh.configitem(
97 eh.configitem(
98 b'phabricator', b'callsign', default=None,
98 b'phabricator', b'callsign', default=None,
99 )
99 )
100 eh.configitem(
100 eh.configitem(
101 b'phabricator', b'curlcmd', default=None,
101 b'phabricator', b'curlcmd', default=None,
102 )
102 )
103 # developer config: phabricator.repophid
103 # developer config: phabricator.repophid
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'repophid', default=None,
105 b'phabricator', b'repophid', default=None,
106 )
106 )
107 eh.configitem(
107 eh.configitem(
108 b'phabricator', b'url', default=None,
108 b'phabricator', b'url', default=None,
109 )
109 )
110 eh.configitem(
110 eh.configitem(
111 b'phabsend', b'confirm', default=False,
111 b'phabsend', b'confirm', default=False,
112 )
112 )
113
113
114 colortable = {
114 colortable = {
115 b'phabricator.action.created': b'green',
115 b'phabricator.action.created': b'green',
116 b'phabricator.action.skipped': b'magenta',
116 b'phabricator.action.skipped': b'magenta',
117 b'phabricator.action.updated': b'magenta',
117 b'phabricator.action.updated': b'magenta',
118 b'phabricator.desc': b'',
118 b'phabricator.desc': b'',
119 b'phabricator.drev': b'bold',
119 b'phabricator.drev': b'bold',
120 b'phabricator.node': b'',
120 b'phabricator.node': b'',
121 }
121 }
122
122
123 _VCR_FLAGS = [
123 _VCR_FLAGS = [
124 (
124 (
125 b'',
125 b'',
126 b'test-vcr',
126 b'test-vcr',
127 b'',
127 b'',
128 _(
128 _(
129 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
129 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
130 b', otherwise will mock all http requests using the specified vcr file.'
130 b', otherwise will mock all http requests using the specified vcr file.'
131 b' (ADVANCED)'
131 b' (ADVANCED)'
132 ),
132 ),
133 ),
133 ),
134 ]
134 ]
135
135
136
136
137 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
137 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
138 fullflags = flags + _VCR_FLAGS
138 fullflags = flags + _VCR_FLAGS
139
139
140 def hgmatcher(r1, r2):
140 def hgmatcher(r1, r2):
141 if r1.uri != r2.uri or r1.method != r2.method:
141 if r1.uri != r2.uri or r1.method != r2.method:
142 return False
142 return False
143 r1params = r1.body.split(b'&')
143 r1params = r1.body.split(b'&')
144 r2params = r2.body.split(b'&')
144 r2params = r2.body.split(b'&')
145 return set(r1params) == set(r2params)
145 return set(r1params) == set(r2params)
146
146
147 def sanitiserequest(request):
147 def sanitiserequest(request):
148 request.body = re.sub(
148 request.body = re.sub(
149 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
149 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
150 )
150 )
151 return request
151 return request
152
152
153 def sanitiseresponse(response):
153 def sanitiseresponse(response):
154 if r'set-cookie' in response[r'headers']:
154 if r'set-cookie' in response[r'headers']:
155 del response[r'headers'][r'set-cookie']
155 del response[r'headers'][r'set-cookie']
156 return response
156 return response
157
157
158 def decorate(fn):
158 def decorate(fn):
159 def inner(*args, **kwargs):
159 def inner(*args, **kwargs):
160 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
160 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
161 if cassette:
161 if cassette:
162 import hgdemandimport
162 import hgdemandimport
163
163
164 with hgdemandimport.deactivated():
164 with hgdemandimport.deactivated():
165 import vcr as vcrmod
165 import vcr as vcrmod
166 import vcr.stubs as stubs
166 import vcr.stubs as stubs
167
167
168 vcr = vcrmod.VCR(
168 vcr = vcrmod.VCR(
169 serializer=r'json',
169 serializer=r'json',
170 before_record_request=sanitiserequest,
170 before_record_request=sanitiserequest,
171 before_record_response=sanitiseresponse,
171 before_record_response=sanitiseresponse,
172 custom_patches=[
172 custom_patches=[
173 (
173 (
174 urlmod,
174 urlmod,
175 r'httpconnection',
175 r'httpconnection',
176 stubs.VCRHTTPConnection,
176 stubs.VCRHTTPConnection,
177 ),
177 ),
178 (
178 (
179 urlmod,
179 urlmod,
180 r'httpsconnection',
180 r'httpsconnection',
181 stubs.VCRHTTPSConnection,
181 stubs.VCRHTTPSConnection,
182 ),
182 ),
183 ],
183 ],
184 )
184 )
185 vcr.register_matcher(r'hgmatcher', hgmatcher)
185 vcr.register_matcher(r'hgmatcher', hgmatcher)
186 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
186 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
187 return fn(*args, **kwargs)
187 return fn(*args, **kwargs)
188 return fn(*args, **kwargs)
188 return fn(*args, **kwargs)
189
189
190 inner.__name__ = fn.__name__
190 inner.__name__ = fn.__name__
191 inner.__doc__ = fn.__doc__
191 inner.__doc__ = fn.__doc__
192 return command(
192 return command(
193 name,
193 name,
194 fullflags,
194 fullflags,
195 spec,
195 spec,
196 helpcategory=helpcategory,
196 helpcategory=helpcategory,
197 optionalrepo=optionalrepo,
197 optionalrepo=optionalrepo,
198 )(inner)
198 )(inner)
199
199
200 return decorate
200 return decorate
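
# Editor's sketch (not part of the changeset): how a command is declared
# through vcrcommand.  The decorator adds an implicit --test-vcr flag; when a
# cassette path is supplied, HTTP traffic is recorded to (or replayed from)
# that file.  The command name and body below are hypothetical, for
# illustration only.

@vcrcommand(b'debugexamplecall', [], _(b'METHOD'), optionalrepo=True)
def debugexamplecall(ui, repo, name):
    """hypothetical command, present only to illustrate the decorator"""
    ui.write(b'would call Conduit method %s here\n' % name)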
201
201
202
202
203 def urlencodenested(params):
203 def urlencodenested(params):
204 """like urlencode, but works with nested parameters.
204 """like urlencode, but works with nested parameters.
205
205
206 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
206 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
207 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
207 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
208 urlencode. Note: the encoding is consistent with PHP's http_build_query.
208 urlencode. Note: the encoding is consistent with PHP's http_build_query.
209 """
209 """
210 flatparams = util.sortdict()
210 flatparams = util.sortdict()
211
211
212 def process(prefix, obj):
212 def process(prefix, obj):
213 if isinstance(obj, bool):
213 if isinstance(obj, bool):
214 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
214 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
215 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
215 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
216 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
216 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
217 if items is None:
217 if items is None:
218 flatparams[prefix] = obj
218 flatparams[prefix] = obj
219 else:
219 else:
220 for k, v in items(obj):
220 for k, v in items(obj):
221 if prefix:
221 if prefix:
222 process(b'%s[%s]' % (prefix, k), v)
222 process(b'%s[%s]' % (prefix, k), v)
223 else:
223 else:
224 process(k, v)
224 process(k, v)
225
225
226 process(b'', params)
226 process(b'', params)
227 return util.urlreq.urlencode(flatparams)
227 return util.urlreq.urlencode(flatparams)
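
# Editor's sketch (not part of the changeset): the flattening described in the
# docstring above, using its own example.  Assumes the module's names are in
# scope (for instance via ``from hgext.phabricator import urlencodenested`` in
# a test).

_nested = {b'a': [b'b', b'c'], b'd': {b'e': b'f'}}
# Intermediate flat form: {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}; urlencode
# then percent-escapes the brackets, matching PHP's http_build_query layout.
_encoded = urlencodenested(_nested)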
228
228
229
229
230 def readurltoken(ui):
230 def readurltoken(ui):
231 """return conduit url, token and make sure they exist
231 """return conduit url, token and make sure they exist
232
232
233 Currently read from [auth] config section. In the future, it might
233 Currently read from [auth] config section. In the future, it might
234 make sense to read from .arcconfig and .arcrc as well.
234 make sense to read from .arcconfig and .arcrc as well.
235 """
235 """
236 url = ui.config(b'phabricator', b'url')
236 url = ui.config(b'phabricator', b'url')
237 if not url:
237 if not url:
238 raise error.Abort(
238 raise error.Abort(
239 _(b'config %s.%s is required') % (b'phabricator', b'url')
239 _(b'config %s.%s is required') % (b'phabricator', b'url')
240 )
240 )
241
241
242 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
242 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
243 token = None
243 token = None
244
244
245 if res:
245 if res:
246 group, auth = res
246 group, auth = res
247
247
248 ui.debug(b"using auth.%s.* for authentication\n" % group)
248 ui.debug(b"using auth.%s.* for authentication\n" % group)
249
249
250 token = auth.get(b'phabtoken')
250 token = auth.get(b'phabtoken')
251
251
252 if not token:
252 if not token:
253 raise error.Abort(
253 raise error.Abort(
254 _(b'Can\'t find conduit token associated to %s') % (url,)
254 _(b'Can\'t find conduit token associated to %s') % (url,)
255 )
255 )
256
256
257 return url, token
257 return url, token
258
258
259
259
260 def callconduit(ui, name, params):
260 def callconduit(ui, name, params):
261 """call Conduit API, params is a dict. return json.loads result, or None"""
261 """call Conduit API, params is a dict. return json.loads result, or None"""
262 host, token = readurltoken(ui)
262 host, token = readurltoken(ui)
263 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
263 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
264 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
264 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
265 params = params.copy()
265 params = params.copy()
266 params[b'api.token'] = token
266 params[b'api.token'] = token
267 data = urlencodenested(params)
267 data = urlencodenested(params)
268 curlcmd = ui.config(b'phabricator', b'curlcmd')
268 curlcmd = ui.config(b'phabricator', b'curlcmd')
269 if curlcmd:
269 if curlcmd:
270 sin, sout = procutil.popen2(
270 sin, sout = procutil.popen2(
271 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
271 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
272 )
272 )
273 sin.write(data)
273 sin.write(data)
274 sin.close()
274 sin.close()
275 body = sout.read()
275 body = sout.read()
276 else:
276 else:
277 urlopener = urlmod.opener(ui, authinfo)
277 urlopener = urlmod.opener(ui, authinfo)
278 request = util.urlreq.request(pycompat.strurl(url), data=data)
278 request = util.urlreq.request(pycompat.strurl(url), data=data)
279 with contextlib.closing(urlopener.open(request)) as rsp:
279 with contextlib.closing(urlopener.open(request)) as rsp:
280 body = rsp.read()
280 body = rsp.read()
281 ui.debug(b'Conduit Response: %s\n' % body)
281 ui.debug(b'Conduit Response: %s\n' % body)
282 parsed = pycompat.rapply(
282 parsed = pycompat.rapply(
283 lambda x: encoding.unitolocal(x)
283 lambda x: encoding.unitolocal(x)
284 if isinstance(x, pycompat.unicode)
284 if isinstance(x, pycompat.unicode)
285 else x,
285 else x,
286 # json.loads only accepts bytes from py3.6+
286 # json.loads only accepts bytes from py3.6+
287 json.loads(encoding.unifromlocal(body)),
287 json.loads(encoding.unifromlocal(body)),
288 )
288 )
289 if parsed.get(b'error_code'):
289 if parsed.get(b'error_code'):
290 msg = _(b'Conduit Error (%s): %s') % (
290 msg = _(b'Conduit Error (%s): %s') % (
291 parsed[b'error_code'],
291 parsed[b'error_code'],
292 parsed[b'error_info'],
292 parsed[b'error_info'],
293 )
293 )
294 raise error.Abort(msg)
294 raise error.Abort(msg)
295 return parsed[b'result']
295 return parsed[b'result']
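
# Editor's sketch (not part of the changeset): a minimal Conduit round trip.
# ``conduit.ping`` is assumed to be available on the Phabricator instance; any
# error_code in the response is raised as error.Abort by callconduit() above.

def _pingconduit(ui):
    # typically returns the server hostname on success
    return callconduit(ui, b'conduit.ping', {})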
296
296
297
297
298 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
298 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
299 def debugcallconduit(ui, repo, name):
299 def debugcallconduit(ui, repo, name):
300 """call Conduit API
300 """call Conduit API
301
301
302 Call parameters are read from stdin as a JSON blob. Result will be written
302 Call parameters are read from stdin as a JSON blob. Result will be written
303 to stdout as a JSON blob.
303 to stdout as a JSON blob.
304 """
304 """
305 # json.loads only accepts bytes from 3.6+
305 # json.loads only accepts bytes from 3.6+
306 rawparams = encoding.unifromlocal(ui.fin.read())
306 rawparams = encoding.unifromlocal(ui.fin.read())
307 # json.loads only returns unicode strings
307 # json.loads only returns unicode strings
308 params = pycompat.rapply(
308 params = pycompat.rapply(
309 lambda x: encoding.unitolocal(x)
309 lambda x: encoding.unitolocal(x)
310 if isinstance(x, pycompat.unicode)
310 if isinstance(x, pycompat.unicode)
311 else x,
311 else x,
312 json.loads(rawparams),
312 json.loads(rawparams),
313 )
313 )
314 # json.dumps only accepts unicode strings
314 # json.dumps only accepts unicode strings
315 result = pycompat.rapply(
315 result = pycompat.rapply(
316 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
316 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
317 callconduit(ui, name, params),
317 callconduit(ui, name, params),
318 )
318 )
319 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
319 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
320 ui.write(b'%s\n' % encoding.unitolocal(s))
320 ui.write(b'%s\n' % encoding.unitolocal(s))
321
321
322
322
323 def getrepophid(repo):
323 def getrepophid(repo):
324 """given callsign, return repository PHID or None"""
324 """given callsign, return repository PHID or None"""
325 # developer config: phabricator.repophid
325 # developer config: phabricator.repophid
326 repophid = repo.ui.config(b'phabricator', b'repophid')
326 repophid = repo.ui.config(b'phabricator', b'repophid')
327 if repophid:
327 if repophid:
328 return repophid
328 return repophid
329 callsign = repo.ui.config(b'phabricator', b'callsign')
329 callsign = repo.ui.config(b'phabricator', b'callsign')
330 if not callsign:
330 if not callsign:
331 return None
331 return None
332 query = callconduit(
332 query = callconduit(
333 repo.ui,
333 repo.ui,
334 b'diffusion.repository.search',
334 b'diffusion.repository.search',
335 {b'constraints': {b'callsigns': [callsign]}},
335 {b'constraints': {b'callsigns': [callsign]}},
336 )
336 )
337 if len(query[b'data']) == 0:
337 if len(query[b'data']) == 0:
338 return None
338 return None
339 repophid = query[b'data'][0][b'phid']
339 repophid = query[b'data'][0][b'phid']
340 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
340 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
341 return repophid
341 return repophid
342
342
343
343
344 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
345 _differentialrevisiondescre = re.compile(
345 _differentialrevisiondescre = re.compile(
346 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
347 )
347 )
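
# Editor's sketch (not part of the changeset): what the description regex
# above captures from an amended commit message.

_sampledesc = b'fix a bug\n\nDifferential Revision: https://phab.example.com/D1234'
_m = _differentialrevisiondescre.search(_sampledesc)
# _m.group(r'url') == b'https://phab.example.com/D1234'
# _m.group(r'id')  == b'1234'  (int() of this is how callers recover the drev id)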
348
348
349
349
350 def getoldnodedrevmap(repo, nodelist):
350 def getoldnodedrevmap(repo, nodelist):
351 """find previous nodes that has been sent to Phabricator
351 """find previous nodes that has been sent to Phabricator
352
352
353 return {node: (oldnode, Differential diff, Differential Revision ID)}
353 return {node: (oldnode, Differential diff, Differential Revision ID)}
354 for node in nodelist with known previous sent versions, or associated
354 for node in nodelist with known previous sent versions, or associated
355 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
355 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
356 be ``None``.
356 be ``None``.
357
357
358 Examines commit messages like "Differential Revision:" to get the
358 Examines commit messages like "Differential Revision:" to get the
359 association information.
359 association information.
360
360
361 If no such commit message line is found, examines all precursors and their
361 If no such commit message line is found, examines all precursors and their
362 tags. Tags in the form "D1234" are considered a match, and the node
362 tags. Tags in the form "D1234" are considered a match, and the node
363 with that tag, together with the number after "D" (ex. 1234), will be returned.
363 with that tag, together with the number after "D" (ex. 1234), will be returned.
364
364
365 The ``old node``, if not None, is guaranteed to be the last diff of the
365 The ``old node``, if not None, is guaranteed to be the last diff of the
366 corresponding Differential Revision, and to exist in the repo.
366 corresponding Differential Revision, and to exist in the repo.
367 """
367 """
368 unfi = repo.unfiltered()
368 unfi = repo.unfiltered()
369 nodemap = unfi.changelog.nodemap
369 nodemap = unfi.changelog.nodemap
370
370
371 result = {} # {node: (oldnode?, lastdiff?, drev)}
371 result = {} # {node: (oldnode?, lastdiff?, drev)}
372 toconfirm = {} # {node: (force, {precnode}, drev)}
372 toconfirm = {} # {node: (force, {precnode}, drev)}
373 for node in nodelist:
373 for node in nodelist:
374 ctx = unfi[node]
374 ctx = unfi[node]
375 # For tags like "D123", put them into "toconfirm" to verify later
375 # For tags like "D123", put them into "toconfirm" to verify later
376 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
376 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
377 for n in precnodes:
377 for n in precnodes:
378 if n in nodemap:
378 if n in nodemap:
379 for tag in unfi.nodetags(n):
379 for tag in unfi.nodetags(n):
380 m = _differentialrevisiontagre.match(tag)
380 m = _differentialrevisiontagre.match(tag)
381 if m:
381 if m:
382 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
382 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
383 continue
383 continue
384
384
385 # Check commit message
385 # Check commit message
386 m = _differentialrevisiondescre.search(ctx.description())
386 m = _differentialrevisiondescre.search(ctx.description())
387 if m:
387 if m:
388 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
388 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
389
389
390 # Double check that tags are genuine by collecting all old nodes from
390 # Double check that tags are genuine by collecting all old nodes from
391 # Phabricator, and expecting the precursors to overlap with them.
391 # Phabricator, and expecting the precursors to overlap with them.
392 if toconfirm:
392 if toconfirm:
393 drevs = [drev for force, precs, drev in toconfirm.values()]
393 drevs = [drev for force, precs, drev in toconfirm.values()]
394 alldiffs = callconduit(
394 alldiffs = callconduit(
395 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
395 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
396 )
396 )
397 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
397 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
398 for newnode, (force, precset, drev) in toconfirm.items():
398 for newnode, (force, precset, drev) in toconfirm.items():
399 diffs = [
399 diffs = [
400 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
400 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
401 ]
401 ]
402
402
403 # "precursors" as known by Phabricator
403 # "precursors" as known by Phabricator
404 phprecset = set(getnode(d) for d in diffs)
404 phprecset = set(getnode(d) for d in diffs)
405
405
406 # Ignore if precursors (Phabricator and local repo) do not overlap,
406 # Ignore if precursors (Phabricator and local repo) do not overlap,
407 # and force is not set (when commit message says nothing)
407 # and force is not set (when commit message says nothing)
408 if not force and not bool(phprecset & precset):
408 if not force and not bool(phprecset & precset):
409 tagname = b'D%d' % drev
409 tagname = b'D%d' % drev
410 tags.tag(
410 tags.tag(
411 repo,
411 repo,
412 tagname,
412 tagname,
413 nullid,
413 nullid,
414 message=None,
414 message=None,
415 user=None,
415 user=None,
416 date=None,
416 date=None,
417 local=True,
417 local=True,
418 )
418 )
419 unfi.ui.warn(
419 unfi.ui.warn(
420 _(
420 _(
421 b'D%s: local tag removed - does not match '
421 b'D%s: local tag removed - does not match '
422 b'Differential history\n'
422 b'Differential history\n'
423 )
423 )
424 % drev
424 % drev
425 )
425 )
426 continue
426 continue
427
427
428 # Find the last node using Phabricator metadata, and make sure it
428 # Find the last node using Phabricator metadata, and make sure it
429 # exists in the repo
429 # exists in the repo
430 oldnode = lastdiff = None
430 oldnode = lastdiff = None
431 if diffs:
431 if diffs:
432 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
432 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
433 oldnode = getnode(lastdiff)
433 oldnode = getnode(lastdiff)
434 if oldnode and oldnode not in nodemap:
434 if oldnode and oldnode not in nodemap:
435 oldnode = None
435 oldnode = None
436
436
437 result[newnode] = (oldnode, lastdiff, drev)
437 result[newnode] = (oldnode, lastdiff, drev)
438
438
439 return result
439 return result
440
440
441
441
442 def getdiff(ctx, diffopts):
442 def getdiff(ctx, diffopts):
443 """plain-text diff without header (user, commit message, etc)"""
443 """plain-text diff without header (user, commit message, etc)"""
444 output = util.stringio()
444 output = util.stringio()
445 for chunk, _label in patch.diffui(
445 for chunk, _label in patch.diffui(
446 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
446 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
447 ):
447 ):
448 output.write(chunk)
448 output.write(chunk)
449 return output.getvalue()
449 return output.getvalue()
450
450
451
451
452 class DiffChangeType(object):
452 class DiffChangeType(object):
453 ADD = 1
453 ADD = 1
454 CHANGE = 2
454 CHANGE = 2
455 DELETE = 3
455 DELETE = 3
456 MOVE_AWAY = 4
456 MOVE_AWAY = 4
457 COPY_AWAY = 5
457 COPY_AWAY = 5
458 MOVE_HERE = 6
458 MOVE_HERE = 6
459 COPY_HERE = 7
459 COPY_HERE = 7
460 MULTICOPY = 8
460 MULTICOPY = 8
461
461
462
462
463 class DiffFileType(object):
463 class DiffFileType(object):
464 TEXT = 1
464 TEXT = 1
465 IMAGE = 2
465 IMAGE = 2
466 BINARY = 3
466 BINARY = 3
467
467
468
468
469 @attr.s
469 @attr.s
470 class phabhunk(dict):
470 class phabhunk(dict):
471 """Represents a Differential hunk, which is owned by a Differential change
471 """Represents a Differential hunk, which is owned by a Differential change
472 """
472 """
473
473
474 oldOffset = attr.ib(default=0) # camelcase-required
474 oldOffset = attr.ib(default=0) # camelcase-required
475 oldLength = attr.ib(default=0) # camelcase-required
475 oldLength = attr.ib(default=0) # camelcase-required
476 newOffset = attr.ib(default=0) # camelcase-required
476 newOffset = attr.ib(default=0) # camelcase-required
477 newLength = attr.ib(default=0) # camelcase-required
477 newLength = attr.ib(default=0) # camelcase-required
478 corpus = attr.ib(default='')
478 corpus = attr.ib(default='')
479 # These get added to the phabchange's equivalents
479 # These get added to the phabchange's equivalents
480 addLines = attr.ib(default=0) # camelcase-required
480 addLines = attr.ib(default=0) # camelcase-required
481 delLines = attr.ib(default=0) # camelcase-required
481 delLines = attr.ib(default=0) # camelcase-required
482
482
483
483
484 @attr.s
484 @attr.s
485 class phabchange(object):
485 class phabchange(object):
486 """Represents a Differential change, owns Differential hunks and owned by a
486 """Represents a Differential change, owns Differential hunks and owned by a
487 Differential diff. Each one represents one file in a diff.
487 Differential diff. Each one represents one file in a diff.
488 """
488 """
489
489
490 currentPath = attr.ib(default=None) # camelcase-required
490 currentPath = attr.ib(default=None) # camelcase-required
491 oldPath = attr.ib(default=None) # camelcase-required
491 oldPath = attr.ib(default=None) # camelcase-required
492 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
492 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
493 metadata = attr.ib(default=attr.Factory(dict))
493 metadata = attr.ib(default=attr.Factory(dict))
494 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
494 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
495 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
495 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
496 type = attr.ib(default=DiffChangeType.CHANGE)
496 type = attr.ib(default=DiffChangeType.CHANGE)
497 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
497 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
498 commitHash = attr.ib(default=None) # camelcase-required
498 commitHash = attr.ib(default=None) # camelcase-required
499 addLines = attr.ib(default=0) # camelcase-required
499 addLines = attr.ib(default=0) # camelcase-required
500 delLines = attr.ib(default=0) # camelcase-required
500 delLines = attr.ib(default=0) # camelcase-required
501 hunks = attr.ib(default=attr.Factory(list))
501 hunks = attr.ib(default=attr.Factory(list))
502
502
503 def copynewmetadatatoold(self):
503 def copynewmetadatatoold(self):
504 for key in list(self.metadata.keys()):
504 for key in list(self.metadata.keys()):
505 newkey = key.replace(b'new:', b'old:')
505 newkey = key.replace(b'new:', b'old:')
506 self.metadata[newkey] = self.metadata[key]
506 self.metadata[newkey] = self.metadata[key]
507
507
508 def addoldmode(self, value):
508 def addoldmode(self, value):
509 self.oldProperties[b'unix:filemode'] = value
509 self.oldProperties[b'unix:filemode'] = value
510
510
511 def addnewmode(self, value):
511 def addnewmode(self, value):
512 self.newProperties[b'unix:filemode'] = value
512 self.newProperties[b'unix:filemode'] = value
513
513
514 def addhunk(self, hunk):
514 def addhunk(self, hunk):
515 if not isinstance(hunk, phabhunk):
515 if not isinstance(hunk, phabhunk):
516 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
516 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
517 self.hunks.append(hunk)
517 self.hunks.append(hunk)
518 # It's useful to include these stats since the Phab web UI shows them,
518 # It's useful to include these stats since the Phab web UI shows them,
519 # and uses them to estimate how large a change a Revision is. Also used
519 # and uses them to estimate how large a change a Revision is. Also used
520 # in email subjects for the [+++--] bit.
520 # in email subjects for the [+++--] bit.
521 self.addLines += hunk.addLines
521 self.addLines += hunk.addLines
522 self.delLines += hunk.delLines
522 self.delLines += hunk.delLines
523
523
524
524
525 @attr.s
526 class phabdiff(object):
527 """Represents a Differential diff, owns Differential changes. Corresponds
528 to a commit.
529 """
530
531 # Doesn't seem to be any reason to send this (output of uname -n)
532 sourceMachine = attr.ib(default=b'') # camelcase-required
533 sourcePath = attr.ib(default=b'/') # camelcase-required
534 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
535 sourceControlPath = attr.ib(default=b'/') # camelcase-required
536 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
537 branch = attr.ib(default=b'default')
538 bookmark = attr.ib(default=None)
539 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
540 lintStatus = attr.ib(default=b'none') # camelcase-required
541 unitStatus = attr.ib(default=b'none') # camelcase-required
542 changes = attr.ib(default=attr.Factory(dict))
543 repositoryPHID = attr.ib(default=None) # camelcase-required
544
545 def addchange(self, change):
546 if not isinstance(change, phabchange):
547 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
548 self.changes[change.currentPath] = change
549
550
525 def creatediff(ctx):
551 def creatediff(ctx):
526 """create a Differential Diff"""
552 """create a Differential Diff"""
527 repo = ctx.repo()
553 repo = ctx.repo()
528 repophid = getrepophid(repo)
554 repophid = getrepophid(repo)
529 # Create a "Differential Diff" via "differential.createrawdiff" API
555 # Create a "Differential Diff" via "differential.createrawdiff" API
530 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
556 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
531 if repophid:
557 if repophid:
532 params[b'repositoryPHID'] = repophid
558 params[b'repositoryPHID'] = repophid
533 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
559 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
534 if not diff:
560 if not diff:
535 raise error.Abort(_(b'cannot create diff for %s') % ctx)
561 raise error.Abort(_(b'cannot create diff for %s') % ctx)
536 return diff
562 return diff
537
563
538
564
539 def writediffproperties(ctx, diff):
565 def writediffproperties(ctx, diff):
540 """write metadata to diff so patches could be applied losslessly"""
566 """write metadata to diff so patches could be applied losslessly"""
541 params = {
567 params = {
542 b'diff_id': diff[b'id'],
568 b'diff_id': diff[b'id'],
543 b'name': b'hg:meta',
569 b'name': b'hg:meta',
544 b'data': templatefilters.json(
570 b'data': templatefilters.json(
545 {
571 {
546 b'user': ctx.user(),
572 b'user': ctx.user(),
547 b'date': b'%d %d' % ctx.date(),
573 b'date': b'%d %d' % ctx.date(),
548 b'branch': ctx.branch(),
574 b'branch': ctx.branch(),
549 b'node': ctx.hex(),
575 b'node': ctx.hex(),
550 b'parent': ctx.p1().hex(),
576 b'parent': ctx.p1().hex(),
551 }
577 }
552 ),
578 ),
553 }
579 }
554 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
580 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
555
581
556 params = {
582 params = {
557 b'diff_id': diff[b'id'],
583 b'diff_id': diff[b'id'],
558 b'name': b'local:commits',
584 b'name': b'local:commits',
559 b'data': templatefilters.json(
585 b'data': templatefilters.json(
560 {
586 {
561 ctx.hex(): {
587 ctx.hex(): {
562 b'author': stringutil.person(ctx.user()),
588 b'author': stringutil.person(ctx.user()),
563 b'authorEmail': stringutil.email(ctx.user()),
589 b'authorEmail': stringutil.email(ctx.user()),
564 b'time': int(ctx.date()[0]),
590 b'time': int(ctx.date()[0]),
565 b'commit': ctx.hex(),
591 b'commit': ctx.hex(),
566 b'parents': [ctx.p1().hex()],
592 b'parents': [ctx.p1().hex()],
567 b'branch': ctx.branch(),
593 b'branch': ctx.branch(),
568 },
594 },
569 }
595 }
570 ),
596 ),
571 }
597 }
572 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
598 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
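
# Editor's sketch (not part of the changeset): the payload stored under
# ``hg:meta`` above is just this flat mapping (values fabricated here); it is
# what later lets patches pulled from Phabricator be applied losslessly.

_hgmeta_example = {
    b'user': b'Alice <alice@example.com>',
    b'date': b'1570000000 0',  # b'%d %d' % ctx.date(): unix time and tz offset
    b'branch': b'default',
    b'node': b'f' * 40,        # full hex of the changeset that was sent
    b'parent': b'0' * 40,      # full hex of its first parent
}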
573
599
574
600
575 def createdifferentialrevision(
601 def createdifferentialrevision(
576 ctx,
602 ctx,
577 revid=None,
603 revid=None,
578 parentrevphid=None,
604 parentrevphid=None,
579 oldnode=None,
605 oldnode=None,
580 olddiff=None,
606 olddiff=None,
581 actions=None,
607 actions=None,
582 comment=None,
608 comment=None,
583 ):
609 ):
584 """create or update a Differential Revision
610 """create or update a Differential Revision
585
611
586 If revid is None, create a new Differential Revision, otherwise update
612 If revid is None, create a new Differential Revision, otherwise update
587 revid. If parentrevphid is not None, set it as a dependency.
613 revid. If parentrevphid is not None, set it as a dependency.
588
614
589 If oldnode is not None, check if the patch content (without commit message
615 If oldnode is not None, check if the patch content (without commit message
590 and metadata) has changed before creating another diff.
616 and metadata) has changed before creating another diff.
591
617
592 If actions is not None, they will be appended to the transaction.
618 If actions is not None, they will be appended to the transaction.
593 """
619 """
594 repo = ctx.repo()
620 repo = ctx.repo()
595 if oldnode:
621 if oldnode:
596 diffopts = mdiff.diffopts(git=True, context=32767)
622 diffopts = mdiff.diffopts(git=True, context=32767)
597 oldctx = repo.unfiltered()[oldnode]
623 oldctx = repo.unfiltered()[oldnode]
598 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
624 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
599 else:
625 else:
600 neednewdiff = True
626 neednewdiff = True
601
627
602 transactions = []
628 transactions = []
603 if neednewdiff:
629 if neednewdiff:
604 diff = creatediff(ctx)
630 diff = creatediff(ctx)
605 transactions.append({b'type': b'update', b'value': diff[b'phid']})
631 transactions.append({b'type': b'update', b'value': diff[b'phid']})
606 if comment:
632 if comment:
607 transactions.append({b'type': b'comment', b'value': comment})
633 transactions.append({b'type': b'comment', b'value': comment})
608 else:
634 else:
609 # Even if we don't need to upload a new diff because the patch content
635 # Even if we don't need to upload a new diff because the patch content
610 # does not change, we might still need to update its metadata so
636 # does not change, we might still need to update its metadata so
611 # pushers know the correct node metadata.
637 # pushers know the correct node metadata.
612 assert olddiff
638 assert olddiff
613 diff = olddiff
639 diff = olddiff
614 writediffproperties(ctx, diff)
640 writediffproperties(ctx, diff)
615
641
616 # Set the parent Revision every time, so commit re-ordering is picked-up
642 # Set the parent Revision every time, so commit re-ordering is picked-up
617 if parentrevphid:
643 if parentrevphid:
618 transactions.append(
644 transactions.append(
619 {b'type': b'parents.set', b'value': [parentrevphid]}
645 {b'type': b'parents.set', b'value': [parentrevphid]}
620 )
646 )
621
647
622 if actions:
648 if actions:
623 transactions += actions
649 transactions += actions
624
650
625 # Parse commit message and update related fields.
651 # Parse commit message and update related fields.
626 desc = ctx.description()
652 desc = ctx.description()
627 info = callconduit(
653 info = callconduit(
628 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
654 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
629 )
655 )
630 for k, v in info[b'fields'].items():
656 for k, v in info[b'fields'].items():
631 if k in [b'title', b'summary', b'testPlan']:
657 if k in [b'title', b'summary', b'testPlan']:
632 transactions.append({b'type': k, b'value': v})
658 transactions.append({b'type': k, b'value': v})
633
659
634 params = {b'transactions': transactions}
660 params = {b'transactions': transactions}
635 if revid is not None:
661 if revid is not None:
636 # Update an existing Differential Revision
662 # Update an existing Differential Revision
637 params[b'objectIdentifier'] = revid
663 params[b'objectIdentifier'] = revid
638
664
639 revision = callconduit(repo.ui, b'differential.revision.edit', params)
665 revision = callconduit(repo.ui, b'differential.revision.edit', params)
640 if not revision:
666 if not revision:
641 raise error.Abort(_(b'cannot create revision for %s') % ctx)
667 raise error.Abort(_(b'cannot create revision for %s') % ctx)
642
668
643 return revision, diff
669 return revision, diff
644
670
645
671
646 def userphids(repo, names):
672 def userphids(repo, names):
647 """convert user names to PHIDs"""
673 """convert user names to PHIDs"""
648 names = [name.lower() for name in names]
674 names = [name.lower() for name in names]
649 query = {b'constraints': {b'usernames': names}}
675 query = {b'constraints': {b'usernames': names}}
650 result = callconduit(repo.ui, b'user.search', query)
676 result = callconduit(repo.ui, b'user.search', query)
651 # username not found is not an error of the API. So check if we have missed
677 # username not found is not an error of the API. So check if we have missed
652 # some names here.
678 # some names here.
653 data = result[b'data']
679 data = result[b'data']
654 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
680 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
655 unresolved = set(names) - resolved
681 unresolved = set(names) - resolved
656 if unresolved:
682 if unresolved:
657 raise error.Abort(
683 raise error.Abort(
658 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
684 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
659 )
685 )
660 return [entry[b'phid'] for entry in data]
686 return [entry[b'phid'] for entry in data]
661
687
662
688
663 @vcrcommand(
689 @vcrcommand(
664 b'phabsend',
690 b'phabsend',
665 [
691 [
666 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
692 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
667 (b'', b'amend', True, _(b'update commit messages')),
693 (b'', b'amend', True, _(b'update commit messages')),
668 (b'', b'reviewer', [], _(b'specify reviewers')),
694 (b'', b'reviewer', [], _(b'specify reviewers')),
669 (b'', b'blocker', [], _(b'specify blocking reviewers')),
695 (b'', b'blocker', [], _(b'specify blocking reviewers')),
670 (
696 (
671 b'm',
697 b'm',
672 b'comment',
698 b'comment',
673 b'',
699 b'',
674 _(b'add a comment to Revisions with new/updated Diffs'),
700 _(b'add a comment to Revisions with new/updated Diffs'),
675 ),
701 ),
676 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
702 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
677 ],
703 ],
678 _(b'REV [OPTIONS]'),
704 _(b'REV [OPTIONS]'),
679 helpcategory=command.CATEGORY_IMPORT_EXPORT,
705 helpcategory=command.CATEGORY_IMPORT_EXPORT,
680 )
706 )
681 def phabsend(ui, repo, *revs, **opts):
707 def phabsend(ui, repo, *revs, **opts):
682 """upload changesets to Phabricator
708 """upload changesets to Phabricator
683
709
684 If there are multiple revisions specified, they will be sent as a stack
710 If there are multiple revisions specified, they will be sent as a stack
685 with a linear dependency relationship, using the order specified by the
711 with a linear dependency relationship, using the order specified by the
686 revset.
712 revset.
687
713
688 When changesets are uploaded for the first time, local tags will be created
714 When changesets are uploaded for the first time, local tags will be created
689 to maintain the association. After the first time, phabsend will check
715 to maintain the association. After the first time, phabsend will check
690 obsstore and tags information so it can figure out whether to update an
716 obsstore and tags information so it can figure out whether to update an
691 existing Differential Revision, or create a new one.
717 existing Differential Revision, or create a new one.
692
718
693 If --amend is set, update commit messages so they have the
719 If --amend is set, update commit messages so they have the
694 ``Differential Revision`` URL, and remove the related tags. This is similar
720 ``Differential Revision`` URL, and remove the related tags. This is similar
695 to what arcanist does, and is preferred in author-push workflows. Otherwise,
721 to what arcanist does, and is preferred in author-push workflows. Otherwise,
696 use local tags to record the ``Differential Revision`` association.
722 use local tags to record the ``Differential Revision`` association.
697
723
698 The --confirm option lets you confirm changesets before sending them. You
724 The --confirm option lets you confirm changesets before sending them. You
699 can also add the following to your configuration file to make it the default
725 can also add the following to your configuration file to make it the default
700 behaviour::
726 behaviour::
701
727
702 [phabsend]
728 [phabsend]
703 confirm = true
729 confirm = true
704
730
705 phabsend will check obsstore and the above association to decide whether to
731 phabsend will check obsstore and the above association to decide whether to
706 update an existing Differential Revision, or create a new one.
732 update an existing Differential Revision, or create a new one.
707 """
733 """
708 opts = pycompat.byteskwargs(opts)
734 opts = pycompat.byteskwargs(opts)
709 revs = list(revs) + opts.get(b'rev', [])
735 revs = list(revs) + opts.get(b'rev', [])
710 revs = scmutil.revrange(repo, revs)
736 revs = scmutil.revrange(repo, revs)
711
737
712 if not revs:
738 if not revs:
713 raise error.Abort(_(b'phabsend requires at least one changeset'))
739 raise error.Abort(_(b'phabsend requires at least one changeset'))
714 if opts.get(b'amend'):
740 if opts.get(b'amend'):
715 cmdutil.checkunfinished(repo)
741 cmdutil.checkunfinished(repo)
716
742
717 # {newnode: (oldnode, olddiff, olddrev)}
743 # {newnode: (oldnode, olddiff, olddrev)}
718 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
744 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
719
745
720 confirm = ui.configbool(b'phabsend', b'confirm')
746 confirm = ui.configbool(b'phabsend', b'confirm')
721 confirm |= bool(opts.get(b'confirm'))
747 confirm |= bool(opts.get(b'confirm'))
722 if confirm:
748 if confirm:
723 confirmed = _confirmbeforesend(repo, revs, oldmap)
749 confirmed = _confirmbeforesend(repo, revs, oldmap)
724 if not confirmed:
750 if not confirmed:
725 raise error.Abort(_(b'phabsend cancelled'))
751 raise error.Abort(_(b'phabsend cancelled'))
726
752
727 actions = []
753 actions = []
728 reviewers = opts.get(b'reviewer', [])
754 reviewers = opts.get(b'reviewer', [])
729 blockers = opts.get(b'blocker', [])
755 blockers = opts.get(b'blocker', [])
730 phids = []
756 phids = []
731 if reviewers:
757 if reviewers:
732 phids.extend(userphids(repo, reviewers))
758 phids.extend(userphids(repo, reviewers))
733 if blockers:
759 if blockers:
734 phids.extend(
760 phids.extend(
735 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
761 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
736 )
762 )
737 if phids:
763 if phids:
738 actions.append({b'type': b'reviewers.add', b'value': phids})
764 actions.append({b'type': b'reviewers.add', b'value': phids})
739
765
740 drevids = [] # [int]
766 drevids = [] # [int]
741 diffmap = {} # {newnode: diff}
767 diffmap = {} # {newnode: diff}
742
768
743 # Send patches one by one so we know their Differential Revision PHIDs and
769 # Send patches one by one so we know their Differential Revision PHIDs and
744 # can provide dependency relationship
770 # can provide dependency relationship
745 lastrevphid = None
771 lastrevphid = None
746 for rev in revs:
772 for rev in revs:
747 ui.debug(b'sending rev %d\n' % rev)
773 ui.debug(b'sending rev %d\n' % rev)
748 ctx = repo[rev]
774 ctx = repo[rev]
749
775
750 # Get Differential Revision ID
776 # Get Differential Revision ID
751 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
777 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
752 if oldnode != ctx.node() or opts.get(b'amend'):
778 if oldnode != ctx.node() or opts.get(b'amend'):
753 # Create or update Differential Revision
779 # Create or update Differential Revision
754 revision, diff = createdifferentialrevision(
780 revision, diff = createdifferentialrevision(
755 ctx,
781 ctx,
756 revid,
782 revid,
757 lastrevphid,
783 lastrevphid,
758 oldnode,
784 oldnode,
759 olddiff,
785 olddiff,
760 actions,
786 actions,
761 opts.get(b'comment'),
787 opts.get(b'comment'),
762 )
788 )
763 diffmap[ctx.node()] = diff
789 diffmap[ctx.node()] = diff
764 newrevid = int(revision[b'object'][b'id'])
790 newrevid = int(revision[b'object'][b'id'])
765 newrevphid = revision[b'object'][b'phid']
791 newrevphid = revision[b'object'][b'phid']
766 if revid:
792 if revid:
767 action = b'updated'
793 action = b'updated'
768 else:
794 else:
769 action = b'created'
795 action = b'created'
770
796
771 # Create a local tag to note the association, if commit message
797 # Create a local tag to note the association, if commit message
772 # does not have it already
798 # does not have it already
773 m = _differentialrevisiondescre.search(ctx.description())
799 m = _differentialrevisiondescre.search(ctx.description())
774 if not m or int(m.group(r'id')) != newrevid:
800 if not m or int(m.group(r'id')) != newrevid:
775 tagname = b'D%d' % newrevid
801 tagname = b'D%d' % newrevid
776 tags.tag(
802 tags.tag(
777 repo,
803 repo,
778 tagname,
804 tagname,
779 ctx.node(),
805 ctx.node(),
780 message=None,
806 message=None,
781 user=None,
807 user=None,
782 date=None,
808 date=None,
783 local=True,
809 local=True,
784 )
810 )
785 else:
811 else:
786 # Nothing changed. But still set "newrevphid" so the next revision
812 # Nothing changed. But still set "newrevphid" so the next revision
787 # could depend on this one and "newrevid" for the summary line.
813 # could depend on this one and "newrevid" for the summary line.
788 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
814 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
789 newrevid = revid
815 newrevid = revid
790 action = b'skipped'
816 action = b'skipped'
791
817
792 actiondesc = ui.label(
818 actiondesc = ui.label(
793 {
819 {
794 b'created': _(b'created'),
820 b'created': _(b'created'),
795 b'skipped': _(b'skipped'),
821 b'skipped': _(b'skipped'),
796 b'updated': _(b'updated'),
822 b'updated': _(b'updated'),
797 }[action],
823 }[action],
798 b'phabricator.action.%s' % action,
824 b'phabricator.action.%s' % action,
799 )
825 )
800 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
826 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
801 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
827 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
802 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
828 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
803 ui.write(
829 ui.write(
804 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
830 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
805 )
831 )
806 drevids.append(newrevid)
832 drevids.append(newrevid)
807 lastrevphid = newrevphid
833 lastrevphid = newrevphid
808
834
809 # Update commit messages and remove tags
835 # Update commit messages and remove tags
810 if opts.get(b'amend'):
836 if opts.get(b'amend'):
811 unfi = repo.unfiltered()
837 unfi = repo.unfiltered()
812 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
838 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
813 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
839 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
814 wnode = unfi[b'.'].node()
840 wnode = unfi[b'.'].node()
815 mapping = {} # {oldnode: [newnode]}
841 mapping = {} # {oldnode: [newnode]}
816 for i, rev in enumerate(revs):
842 for i, rev in enumerate(revs):
817 old = unfi[rev]
843 old = unfi[rev]
818 drevid = drevids[i]
844 drevid = drevids[i]
819 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
845 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
820 newdesc = getdescfromdrev(drev)
846 newdesc = getdescfromdrev(drev)
821 # Make sure the commit message contains "Differential Revision"
847 # Make sure the commit message contains "Differential Revision"
822 if old.description() != newdesc:
848 if old.description() != newdesc:
823 if old.phase() == phases.public:
849 if old.phase() == phases.public:
824 ui.warn(
850 ui.warn(
825 _(b"warning: not updating public commit %s\n")
851 _(b"warning: not updating public commit %s\n")
826 % scmutil.formatchangeid(old)
852 % scmutil.formatchangeid(old)
827 )
853 )
828 continue
854 continue
829 parents = [
855 parents = [
830 mapping.get(old.p1().node(), (old.p1(),))[0],
856 mapping.get(old.p1().node(), (old.p1(),))[0],
831 mapping.get(old.p2().node(), (old.p2(),))[0],
857 mapping.get(old.p2().node(), (old.p2(),))[0],
832 ]
858 ]
833 new = context.metadataonlyctx(
859 new = context.metadataonlyctx(
834 repo,
860 repo,
835 old,
861 old,
836 parents=parents,
862 parents=parents,
837 text=newdesc,
863 text=newdesc,
838 user=old.user(),
864 user=old.user(),
839 date=old.date(),
865 date=old.date(),
840 extra=old.extra(),
866 extra=old.extra(),
841 )
867 )
842
868
843 newnode = new.commit()
869 newnode = new.commit()
844
870
845 mapping[old.node()] = [newnode]
871 mapping[old.node()] = [newnode]
846 # Update diff property
872 # Update diff property
847 # If it fails just warn and keep going, otherwise the DREV
873 # If it fails just warn and keep going, otherwise the DREV
848 # associations will be lost
874 # associations will be lost
849 try:
875 try:
850 writediffproperties(unfi[newnode], diffmap[old.node()])
876 writediffproperties(unfi[newnode], diffmap[old.node()])
851 except util.urlerr.urlerror:
877 except util.urlerr.urlerror:
852 ui.warnnoi18n(
878 ui.warnnoi18n(
853 b'Failed to update metadata for D%s\n' % drevid
879 b'Failed to update metadata for D%s\n' % drevid
854 )
880 )
855 # Remove the local tag since it's no longer necessary
881 # Remove the local tag since it's no longer necessary
856 tagname = b'D%d' % drevid
882 tagname = b'D%d' % drevid
857 if tagname in repo.tags():
883 if tagname in repo.tags():
858 tags.tag(
884 tags.tag(
859 repo,
885 repo,
860 tagname,
886 tagname,
861 nullid,
887 nullid,
862 message=None,
888 message=None,
863 user=None,
889 user=None,
864 date=None,
890 date=None,
865 local=True,
891 local=True,
866 )
892 )
867 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
893 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
868 if wnode in mapping:
894 if wnode in mapping:
869 unfi.setparents(mapping[wnode][0])
895 unfi.setparents(mapping[wnode][0])
870
896
871
897
872 # Map from "hg:meta" keys to header understood by "hg import". The order is
898 # Map from "hg:meta" keys to header understood by "hg import". The order is
873 # consistent with "hg export" output.
899 # consistent with "hg export" output.
874 _metanamemap = util.sortdict(
900 _metanamemap = util.sortdict(
875 [
901 [
876 (b'user', b'User'),
902 (b'user', b'User'),
877 (b'date', b'Date'),
903 (b'date', b'Date'),
878 (b'branch', b'Branch'),
904 (b'branch', b'Branch'),
879 (b'node', b'Node ID'),
905 (b'node', b'Node ID'),
880 (b'parent', b'Parent '),
906 (b'parent', b'Parent '),
881 ]
907 ]
882 )
908 )
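
# Editor's sketch (not part of the changeset): rendering "hg:meta" values with
# the map above yields "hg export"-style header lines.  The phabread side of
# the extension consumes this mapping; the rendering shown here is an
# assumption for illustration, with fabricated values.

_meta = {b'user': b'Alice <alice@example.com>', b'date': b'1570000000 0',
         b'branch': b'default', b'node': b'f' * 40, b'parent': b'0' * 40}
_header = b''.join(
    b'# %s %s\n' % (_metanamemap[k], _meta[k])
    for k in _metanamemap
    if k in _meta
)
# first line, for example: "# User Alice <alice@example.com>"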
883
909
884
910
885 def _confirmbeforesend(repo, revs, oldmap):
911 def _confirmbeforesend(repo, revs, oldmap):
886 url, token = readurltoken(repo.ui)
912 url, token = readurltoken(repo.ui)
887 ui = repo.ui
913 ui = repo.ui
888 for rev in revs:
914 for rev in revs:
889 ctx = repo[rev]
915 ctx = repo[rev]
890 desc = ctx.description().splitlines()[0]
916 desc = ctx.description().splitlines()[0]
891 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
917 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
892 if drevid:
918 if drevid:
893 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
919 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
894 else:
920 else:
895 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
921 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
896
922
897 ui.write(
923 ui.write(
898 _(b'%s - %s: %s\n')
924 _(b'%s - %s: %s\n')
899 % (
925 % (
900 drevdesc,
926 drevdesc,
901 ui.label(bytes(ctx), b'phabricator.node'),
927 ui.label(bytes(ctx), b'phabricator.node'),
902 ui.label(desc, b'phabricator.desc'),
928 ui.label(desc, b'phabricator.desc'),
903 )
929 )
904 )
930 )
905
931
906 if ui.promptchoice(
932 if ui.promptchoice(
907 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
933 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
908 ):
934 ):
909 return False
935 return False
910
936
911 return True
937 return True
912
938
913
939
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}


def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()


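# Illustrative sketch (not part of the extension): the normalized form matches
# the filter names accepted by the DREVSPEC language below.
def _example_statusname():
    drev = {b'statusName': b'Needs Review'}  # hypothetical drev fragment
    name = _getstatusname(drev)  # b'needsreview'
    return name in _knownstatusnames  # True

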
# Small language to specify differential revisions. Supported symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}


def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)


def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree


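# Illustrative sketch (not part of the extension): what a small DREVSPEC looks
# like once parsed.  Per the bindings in _elements, ':' binds tighter than '+'
# and '-', so ':D6+8' means "the stack ending at D6, plus D8".  The exact
# tuple shape shown below is an assumption based on _elements and mercurial's
# generic parser module.
def _example_parsespec():
    tree = _parse(b':D6+8')
    # expected shape:
    #   (b'add', (b'ancestors', (b'symbol', b'D6')), (b'symbol', b'8'))
    return tree

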
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)


def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs


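# Illustrative sketch (not part of the extension): for ':D6+8', D6 shows up in
# both sets because its ancestors have to be walked, while 8 is only fetched
# as a single revision.  Assumes the parse-tree shape sketched above.
def _example_prefetch():
    drevs, ancestordrevs = _prefetchdrevs(_parse(b':D6+8'))
    # drevs == {6, 8}, ancestordrevs == {6}
    return drevs, ancestordrevs

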
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
                "3",
                "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
                "phabricator:projects": [],
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]


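# Illustrative sketch (not part of the extension): typical use of querydrev(),
# mirroring what phabread and phabupdate below do.  'repo' is assumed to be a
# local repository whose [phabricator] url/callsign and [auth] token are
# configured; the revision numbers are hypothetical.
def _example_querydrev(repo):
    drevs = querydrev(repo, b':D6+8-(2+D4)')
    # each element is a "Differential Revision dict" as documented above
    return [(int(drev[b'id']), drev[b'statusName']) for drev in drevs]

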
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only care
    about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))


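# Illustrative sketch (not part of the extension): how getdescfromdrev()
# assembles a commit message from a (hypothetical, abbreviated) drev dict.
def _example_getdesc():
    drev = {
        b'title': b'add frobnicator',
        b'summary': b'long explanation',
        b'testPlan': b'ran the test suite',
        b'uri': b'https://phab.example.com/D2',
    }
    return getdescfromdrev(drev)
    # b'add frobnicator\n\nlong explanation\n\nTest Plan:\nran the test'
    # b' suite\n\nDifferential Revision: https://phab.example.com/D2'

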
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
            "hg:meta": {
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "time": 1499546314,
                    "branch": "default",
                    "tag": "",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "local": "1000",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "summary": "...",
                    "message": "...",
                    "authorEmail": "foo@example.com"
                }
            }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta


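# Illustrative sketch (not part of the extension): converting an arc-style
# "local:commits" property into hg:meta-like metadata with getdiffmeta().
# Note the hard-coded "0" offset in the date: the time zone is lost, as the
# docstring above warns.  The hashes and values are hypothetical.
def _example_getdiffmeta():
    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    return getdiffmeta(diff)
    # {b'user': b'Foo Bar <foo@example.com>', b'date': b'1499546314 0',
    #  b'branch': b'default', b'node': b'98c08acae...', b'parent': b'6d0abad...'}

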
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)


@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only filter the revisions selected by the non-status part of the query and
    cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)


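# Illustrative usage (not part of the extension); D123 and D9 are hypothetical
# revisions on the configured Phabricator instance:
#
#   $ hg phabread D123 > patch && hg import patch
#   $ hg phabread --stack D123          # same as 'hg phabread :D123'
#   $ hg phabread ':D9 & needsreview'   # only "Needs Review" revisions up to D9

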
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)


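# Illustrative usage (not part of the extension); at most one of the status
# flags may be given, optionally with a comment attached to the last revision:
#
#   $ hg phabupdate --accept :D123 -m 'queuing the stack, thanks'
#   $ hg phabupdate --abandon D456

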
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None
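

# Illustrative usage (not part of the extension): the keyword can be used from
# the command line or in templates, e.g.:
#
#   $ hg log -r . -T '{phabreview.url}\n'
#   $ hg log -r . -T 'review: {phabreview.id}\n'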