phabricator: add the phabhunk data structure...
Ian Moody
r43453:73d4bc60 default
@@ -1,1276 +1,1292 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changesets from being sent. The requirement could be disabled by changing
15 changesets from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that are not easily supported by
30 # if you need to specify advanced options that are not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial.pycompat import getattr
52 from mercurial.pycompat import getattr
53 from mercurial.thirdparty import attr
53 from mercurial import (
54 from mercurial import (
54 cmdutil,
55 cmdutil,
55 context,
56 context,
56 encoding,
57 encoding,
57 error,
58 error,
58 exthelper,
59 exthelper,
59 httpconnection as httpconnectionmod,
60 httpconnection as httpconnectionmod,
60 mdiff,
61 mdiff,
61 obsutil,
62 obsutil,
62 parser,
63 parser,
63 patch,
64 patch,
64 phases,
65 phases,
65 pycompat,
66 pycompat,
66 scmutil,
67 scmutil,
67 smartset,
68 smartset,
68 tags,
69 tags,
69 templatefilters,
70 templatefilters,
70 templateutil,
71 templateutil,
71 url as urlmod,
72 url as urlmod,
72 util,
73 util,
73 )
74 )
74 from mercurial.utils import (
75 from mercurial.utils import (
75 procutil,
76 procutil,
76 stringutil,
77 stringutil,
77 )
78 )
78
79
79 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
80 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
81 # be specifying the version(s) of Mercurial they are tested with, or
82 # be specifying the version(s) of Mercurial they are tested with, or
82 # leave the attribute unspecified.
83 # leave the attribute unspecified.
83 testedwith = b'ships-with-hg-core'
84 testedwith = b'ships-with-hg-core'
84
85
85 eh = exthelper.exthelper()
86 eh = exthelper.exthelper()
86
87
87 cmdtable = eh.cmdtable
88 cmdtable = eh.cmdtable
88 command = eh.command
89 command = eh.command
89 configtable = eh.configtable
90 configtable = eh.configtable
90 templatekeyword = eh.templatekeyword
91 templatekeyword = eh.templatekeyword
91
92
92 # developer config: phabricator.batchsize
93 # developer config: phabricator.batchsize
93 eh.configitem(
94 eh.configitem(
94 b'phabricator', b'batchsize', default=12,
95 b'phabricator', b'batchsize', default=12,
95 )
96 )
96 eh.configitem(
97 eh.configitem(
97 b'phabricator', b'callsign', default=None,
98 b'phabricator', b'callsign', default=None,
98 )
99 )
99 eh.configitem(
100 eh.configitem(
100 b'phabricator', b'curlcmd', default=None,
101 b'phabricator', b'curlcmd', default=None,
101 )
102 )
102 # developer config: phabricator.repophid
103 # developer config: phabricator.repophid
103 eh.configitem(
104 eh.configitem(
104 b'phabricator', b'repophid', default=None,
105 b'phabricator', b'repophid', default=None,
105 )
106 )
106 eh.configitem(
107 eh.configitem(
107 b'phabricator', b'url', default=None,
108 b'phabricator', b'url', default=None,
108 )
109 )
109 eh.configitem(
110 eh.configitem(
110 b'phabsend', b'confirm', default=False,
111 b'phabsend', b'confirm', default=False,
111 )
112 )
112
113
113 colortable = {
114 colortable = {
114 b'phabricator.action.created': b'green',
115 b'phabricator.action.created': b'green',
115 b'phabricator.action.skipped': b'magenta',
116 b'phabricator.action.skipped': b'magenta',
116 b'phabricator.action.updated': b'magenta',
117 b'phabricator.action.updated': b'magenta',
117 b'phabricator.desc': b'',
118 b'phabricator.desc': b'',
118 b'phabricator.drev': b'bold',
119 b'phabricator.drev': b'bold',
119 b'phabricator.node': b'',
120 b'phabricator.node': b'',
120 }
121 }
121
122
122 _VCR_FLAGS = [
123 _VCR_FLAGS = [
123 (
124 (
124 b'',
125 b'',
125 b'test-vcr',
126 b'test-vcr',
126 b'',
127 b'',
127 _(
128 _(
128 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
129 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
129 b', otherwise will mock all http requests using the specified vcr file.'
130 b', otherwise will mock all http requests using the specified vcr file.'
130 b' (ADVANCED)'
131 b' (ADVANCED)'
131 ),
132 ),
132 ),
133 ),
133 ]
134 ]
134
135
135
136
136 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
137 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
137 fullflags = flags + _VCR_FLAGS
138 fullflags = flags + _VCR_FLAGS
138
139
139 def hgmatcher(r1, r2):
140 def hgmatcher(r1, r2):
140 if r1.uri != r2.uri or r1.method != r2.method:
141 if r1.uri != r2.uri or r1.method != r2.method:
141 return False
142 return False
142 r1params = r1.body.split(b'&')
143 r1params = r1.body.split(b'&')
143 r2params = r2.body.split(b'&')
144 r2params = r2.body.split(b'&')
144 return set(r1params) == set(r2params)
145 return set(r1params) == set(r2params)
145
146
146 def sanitiserequest(request):
147 def sanitiserequest(request):
147 request.body = re.sub(
148 request.body = re.sub(
148 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
149 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
149 )
150 )
150 return request
151 return request
151
152
152 def sanitiseresponse(response):
153 def sanitiseresponse(response):
153 if r'set-cookie' in response[r'headers']:
154 if r'set-cookie' in response[r'headers']:
154 del response[r'headers'][r'set-cookie']
155 del response[r'headers'][r'set-cookie']
155 return response
156 return response
156
157
157 def decorate(fn):
158 def decorate(fn):
158 def inner(*args, **kwargs):
159 def inner(*args, **kwargs):
159 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
160 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
160 if cassette:
161 if cassette:
161 import hgdemandimport
162 import hgdemandimport
162
163
163 with hgdemandimport.deactivated():
164 with hgdemandimport.deactivated():
164 import vcr as vcrmod
165 import vcr as vcrmod
165 import vcr.stubs as stubs
166 import vcr.stubs as stubs
166
167
167 vcr = vcrmod.VCR(
168 vcr = vcrmod.VCR(
168 serializer=r'json',
169 serializer=r'json',
169 before_record_request=sanitiserequest,
170 before_record_request=sanitiserequest,
170 before_record_response=sanitiseresponse,
171 before_record_response=sanitiseresponse,
171 custom_patches=[
172 custom_patches=[
172 (
173 (
173 urlmod,
174 urlmod,
174 r'httpconnection',
175 r'httpconnection',
175 stubs.VCRHTTPConnection,
176 stubs.VCRHTTPConnection,
176 ),
177 ),
177 (
178 (
178 urlmod,
179 urlmod,
179 r'httpsconnection',
180 r'httpsconnection',
180 stubs.VCRHTTPSConnection,
181 stubs.VCRHTTPSConnection,
181 ),
182 ),
182 ],
183 ],
183 )
184 )
184 vcr.register_matcher(r'hgmatcher', hgmatcher)
185 vcr.register_matcher(r'hgmatcher', hgmatcher)
185 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
186 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
186 return fn(*args, **kwargs)
187 return fn(*args, **kwargs)
187 return fn(*args, **kwargs)
188 return fn(*args, **kwargs)
188
189
189 inner.__name__ = fn.__name__
190 inner.__name__ = fn.__name__
190 inner.__doc__ = fn.__doc__
191 inner.__doc__ = fn.__doc__
191 return command(
192 return command(
192 name,
193 name,
193 fullflags,
194 fullflags,
194 spec,
195 spec,
195 helpcategory=helpcategory,
196 helpcategory=helpcategory,
196 optionalrepo=optionalrepo,
197 optionalrepo=optionalrepo,
197 )(inner)
198 )(inner)
198
199
199 return decorate
200 return decorate
200
201
201
202
202 def urlencodenested(params):
203 def urlencodenested(params):
203 """like urlencode, but works with nested parameters.
204 """like urlencode, but works with nested parameters.
204
205
205 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
206 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
206 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
207 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
207 urlencode. Note: the encoding is consistent with PHP's http_build_query.
208 urlencode. Note: the encoding is consistent with PHP's http_build_query.
208 """
209 """
209 flatparams = util.sortdict()
210 flatparams = util.sortdict()
210
211
211 def process(prefix, obj):
212 def process(prefix, obj):
212 if isinstance(obj, bool):
213 if isinstance(obj, bool):
213 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
214 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
214 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
215 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
215 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
216 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
216 if items is None:
217 if items is None:
217 flatparams[prefix] = obj
218 flatparams[prefix] = obj
218 else:
219 else:
219 for k, v in items(obj):
220 for k, v in items(obj):
220 if prefix:
221 if prefix:
221 process(b'%s[%s]' % (prefix, k), v)
222 process(b'%s[%s]' % (prefix, k), v)
222 else:
223 else:
223 process(k, v)
224 process(k, v)
224
225
225 process(b'', params)
226 process(b'', params)
226 return util.urlreq.urlencode(flatparams)
227 return util.urlreq.urlencode(flatparams)
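
# Illustration (not part of the diff): a minimal standalone sketch of the same
# PHP-style flattening that urlencodenested() performs above, using only the
# standard library.  The helper name flatten_params is hypothetical.
from urllib.parse import urlencode

def flatten_params(params, prefix=''):
    """Flatten nested dicts/lists into PHP http_build_query style keys."""
    flat = {}
    if isinstance(params, bool):
        params = 'true' if params else 'false'  # Python bool -> PHP form
    if isinstance(params, dict):
        items = params.items()
    elif isinstance(params, list):
        items = enumerate(params)
    else:
        flat[prefix] = params
        return flat
    for k, v in items:
        key = '%s[%s]' % (prefix, k) if prefix else str(k)
        flat.update(flatten_params(v, key))
    return flat

# {'a': ['b', 'c'], 'd': {'e': 'f'}} -> a[0]=b&a[1]=c&d[e]=f (URL-quoted)
print(urlencode(flatten_params({'a': ['b', 'c'], 'd': {'e': 'f'}})))
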
227
228
228
229
229 def readurltoken(ui):
230 def readurltoken(ui):
230 """return conduit url, token and make sure they exist
231 """return conduit url, token and make sure they exist
231
232
232 Currently read from [auth] config section. In the future, it might
233 Currently read from [auth] config section. In the future, it might
233 make sense to read from .arcconfig and .arcrc as well.
234 make sense to read from .arcconfig and .arcrc as well.
234 """
235 """
235 url = ui.config(b'phabricator', b'url')
236 url = ui.config(b'phabricator', b'url')
236 if not url:
237 if not url:
237 raise error.Abort(
238 raise error.Abort(
238 _(b'config %s.%s is required') % (b'phabricator', b'url')
239 _(b'config %s.%s is required') % (b'phabricator', b'url')
239 )
240 )
240
241
241 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
242 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
242 token = None
243 token = None
243
244
244 if res:
245 if res:
245 group, auth = res
246 group, auth = res
246
247
247 ui.debug(b"using auth.%s.* for authentication\n" % group)
248 ui.debug(b"using auth.%s.* for authentication\n" % group)
248
249
249 token = auth.get(b'phabtoken')
250 token = auth.get(b'phabtoken')
250
251
251 if not token:
252 if not token:
252 raise error.Abort(
253 raise error.Abort(
253 _(b'Can\'t find conduit token associated to %s') % (url,)
254 _(b'Can\'t find conduit token associated to %s') % (url,)
254 )
255 )
255
256
256 return url, token
257 return url, token
257
258
258
259
259 def callconduit(ui, name, params):
260 def callconduit(ui, name, params):
260 """call Conduit API, params is a dict. return json.loads result, or None"""
261 """call Conduit API, params is a dict. return json.loads result, or None"""
261 host, token = readurltoken(ui)
262 host, token = readurltoken(ui)
262 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
263 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
263 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
264 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
264 params = params.copy()
265 params = params.copy()
265 params[b'api.token'] = token
266 params[b'api.token'] = token
266 data = urlencodenested(params)
267 data = urlencodenested(params)
267 curlcmd = ui.config(b'phabricator', b'curlcmd')
268 curlcmd = ui.config(b'phabricator', b'curlcmd')
268 if curlcmd:
269 if curlcmd:
269 sin, sout = procutil.popen2(
270 sin, sout = procutil.popen2(
270 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
271 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
271 )
272 )
272 sin.write(data)
273 sin.write(data)
273 sin.close()
274 sin.close()
274 body = sout.read()
275 body = sout.read()
275 else:
276 else:
276 urlopener = urlmod.opener(ui, authinfo)
277 urlopener = urlmod.opener(ui, authinfo)
277 request = util.urlreq.request(pycompat.strurl(url), data=data)
278 request = util.urlreq.request(pycompat.strurl(url), data=data)
278 with contextlib.closing(urlopener.open(request)) as rsp:
279 with contextlib.closing(urlopener.open(request)) as rsp:
279 body = rsp.read()
280 body = rsp.read()
280 ui.debug(b'Conduit Response: %s\n' % body)
281 ui.debug(b'Conduit Response: %s\n' % body)
281 parsed = pycompat.rapply(
282 parsed = pycompat.rapply(
282 lambda x: encoding.unitolocal(x)
283 lambda x: encoding.unitolocal(x)
283 if isinstance(x, pycompat.unicode)
284 if isinstance(x, pycompat.unicode)
284 else x,
285 else x,
285 # json.loads only accepts bytes from py3.6+
286 # json.loads only accepts bytes from py3.6+
286 json.loads(encoding.unifromlocal(body)),
287 json.loads(encoding.unifromlocal(body)),
287 )
288 )
288 if parsed.get(b'error_code'):
289 if parsed.get(b'error_code'):
289 msg = _(b'Conduit Error (%s): %s') % (
290 msg = _(b'Conduit Error (%s): %s') % (
290 parsed[b'error_code'],
291 parsed[b'error_code'],
291 parsed[b'error_info'],
292 parsed[b'error_info'],
292 )
293 )
293 raise error.Abort(msg)
294 raise error.Abort(msg)
294 return parsed[b'result']
295 return parsed[b'result']
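
# Illustration (not part of the diff): roughly what callconduit() does on the
# wire, sketched with only the standard library.  PHAB_URL and API_TOKEN are
# placeholders; the real code reads them from the [phabricator]/[auth] config
# sections, flattens nested params with urlencodenested(), and can shell out
# to curlcmd instead.
import json
from urllib.parse import urlencode
from urllib.request import urlopen

PHAB_URL = 'https://phab.example.com'  # assumption: stands in for phabricator.url
API_TOKEN = 'cli-hahayouwish'          # assumption: stands in for auth.*.phabtoken

def call_conduit(method, params):
    data = dict(params, **{'api.token': API_TOKEN})
    body = urlencode(data).encode('ascii')
    with urlopen('%s/api/%s' % (PHAB_URL, method), data=body) as rsp:
        envelope = json.loads(rsp.read().decode('utf-8'))
    if envelope.get('error_code'):
        raise RuntimeError('Conduit Error (%s): %s'
                           % (envelope['error_code'], envelope['error_info']))
    return envelope['result']

# e.g. call_conduit('differential.query', {'ids[0]': 2})
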
295
296
296
297
297 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
298 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
298 def debugcallconduit(ui, repo, name):
299 def debugcallconduit(ui, repo, name):
299 """call Conduit API
300 """call Conduit API
300
301
301 Call parameters are read from stdin as a JSON blob. Result will be written
302 Call parameters are read from stdin as a JSON blob. Result will be written
302 to stdout as a JSON blob.
303 to stdout as a JSON blob.
303 """
304 """
304 # json.loads only accepts bytes from 3.6+
305 # json.loads only accepts bytes from 3.6+
305 rawparams = encoding.unifromlocal(ui.fin.read())
306 rawparams = encoding.unifromlocal(ui.fin.read())
306 # json.loads only returns unicode strings
307 # json.loads only returns unicode strings
307 params = pycompat.rapply(
308 params = pycompat.rapply(
308 lambda x: encoding.unitolocal(x)
309 lambda x: encoding.unitolocal(x)
309 if isinstance(x, pycompat.unicode)
310 if isinstance(x, pycompat.unicode)
310 else x,
311 else x,
311 json.loads(rawparams),
312 json.loads(rawparams),
312 )
313 )
313 # json.dumps only accepts unicode strings
314 # json.dumps only accepts unicode strings
314 result = pycompat.rapply(
315 result = pycompat.rapply(
315 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
316 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
316 callconduit(ui, name, params),
317 callconduit(ui, name, params),
317 )
318 )
318 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
319 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
319 ui.write(b'%s\n' % encoding.unitolocal(s))
320 ui.write(b'%s\n' % encoding.unitolocal(s))
320
321
321
322
322 def getrepophid(repo):
323 def getrepophid(repo):
323 """given callsign, return repository PHID or None"""
324 """given callsign, return repository PHID or None"""
324 # developer config: phabricator.repophid
325 # developer config: phabricator.repophid
325 repophid = repo.ui.config(b'phabricator', b'repophid')
326 repophid = repo.ui.config(b'phabricator', b'repophid')
326 if repophid:
327 if repophid:
327 return repophid
328 return repophid
328 callsign = repo.ui.config(b'phabricator', b'callsign')
329 callsign = repo.ui.config(b'phabricator', b'callsign')
329 if not callsign:
330 if not callsign:
330 return None
331 return None
331 query = callconduit(
332 query = callconduit(
332 repo.ui,
333 repo.ui,
333 b'diffusion.repository.search',
334 b'diffusion.repository.search',
334 {b'constraints': {b'callsigns': [callsign]}},
335 {b'constraints': {b'callsigns': [callsign]}},
335 )
336 )
336 if len(query[b'data']) == 0:
337 if len(query[b'data']) == 0:
337 return None
338 return None
338 repophid = query[b'data'][0][b'phid']
339 repophid = query[b'data'][0][b'phid']
339 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
340 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
340 return repophid
341 return repophid
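
# Illustration (not part of the diff): the Conduit exchange getrepophid() relies
# on, written out as plain data.  The reply is trimmed to the one field used.
repo_search_params = {'constraints': {'callsigns': ['FOO']}}
repo_search_result = {'data': [{'phid': 'PHID-REPO-hub2hx62ieuqeheznasv',
                                'fields': {'callsign': 'FOO'}}]}
repophid = (repo_search_result['data'][0]['phid']
            if repo_search_result['data'] else None)
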
341
342
342
343
343 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 _differentialrevisiondescre = re.compile(
345 _differentialrevisiondescre = re.compile(
345 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 )
347 )
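
# Illustration (not part of the diff): what the two patterns above match,
# checked against a made-up commit message with the stdlib re module.
import re

tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
descre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)

assert tagre.match(b'D1234').group(1) == b'1234'   # local tag "D1234"
assert tagre.match(b'D01234') is None              # no leading zeroes allowed

msg = b'some change\n\nDifferential Revision: https://phab.example.com/D1234'
m = descre.search(msg)
assert m.group('id') == b'1234'
assert m.group('url') == b'https://phab.example.com/D1234'
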
347
348
348
349
349 def getoldnodedrevmap(repo, nodelist):
350 def getoldnodedrevmap(repo, nodelist):
350 """find previous nodes that has been sent to Phabricator
351 """find previous nodes that has been sent to Phabricator
351
352
352 return {node: (oldnode, Differential diff, Differential Revision ID)}
353 return {node: (oldnode, Differential diff, Differential Revision ID)}
353 for node in nodelist with known previous sent versions, or associated
354 for node in nodelist with known previous sent versions, or associated
354 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
355 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
355 be ``None``.
356 be ``None``.
356
357
357 Examines commit messages like "Differential Revision:" to get the
358 Examines commit messages like "Differential Revision:" to get the
358 association information.
359 association information.
359
360
360 If such a commit message line is not found, examines all precursors and their
361 If such a commit message line is not found, examines all precursors and their
361 tags. Tags in the format "D1234" are considered a match, and the node
362 tags. Tags in the format "D1234" are considered a match, and the node
362 with that tag and the number after "D" (e.g. 1234) will be returned.
363 with that tag and the number after "D" (e.g. 1234) will be returned.
363
364
364 The ``old node``, if not None, is guaranteed to be the last diff of
365 The ``old node``, if not None, is guaranteed to be the last diff of
365 the corresponding Differential Revision, and to exist in the repo.
366 the corresponding Differential Revision, and to exist in the repo.
366 """
367 """
367 unfi = repo.unfiltered()
368 unfi = repo.unfiltered()
368 nodemap = unfi.changelog.nodemap
369 nodemap = unfi.changelog.nodemap
369
370
370 result = {} # {node: (oldnode?, lastdiff?, drev)}
371 result = {} # {node: (oldnode?, lastdiff?, drev)}
371 toconfirm = {} # {node: (force, {precnode}, drev)}
372 toconfirm = {} # {node: (force, {precnode}, drev)}
372 for node in nodelist:
373 for node in nodelist:
373 ctx = unfi[node]
374 ctx = unfi[node]
374 # For tags like "D123", put them into "toconfirm" to verify later
375 # For tags like "D123", put them into "toconfirm" to verify later
375 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
376 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
376 for n in precnodes:
377 for n in precnodes:
377 if n in nodemap:
378 if n in nodemap:
378 for tag in unfi.nodetags(n):
379 for tag in unfi.nodetags(n):
379 m = _differentialrevisiontagre.match(tag)
380 m = _differentialrevisiontagre.match(tag)
380 if m:
381 if m:
381 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
382 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
382 continue
383 continue
383
384
384 # Check commit message
385 # Check commit message
385 m = _differentialrevisiondescre.search(ctx.description())
386 m = _differentialrevisiondescre.search(ctx.description())
386 if m:
387 if m:
387 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
388 toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))
388
389
389 # Double check if tags are genuine by collecting all old nodes from
390 # Double check if tags are genuine by collecting all old nodes from
390 # Phabricator, and expect precursors overlap with it.
391 # Phabricator, and expect precursors overlap with it.
391 if toconfirm:
392 if toconfirm:
392 drevs = [drev for force, precs, drev in toconfirm.values()]
393 drevs = [drev for force, precs, drev in toconfirm.values()]
393 alldiffs = callconduit(
394 alldiffs = callconduit(
394 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
395 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
395 )
396 )
396 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
397 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
397 for newnode, (force, precset, drev) in toconfirm.items():
398 for newnode, (force, precset, drev) in toconfirm.items():
398 diffs = [
399 diffs = [
399 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
400 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
400 ]
401 ]
401
402
402 # "precursors" as known by Phabricator
403 # "precursors" as known by Phabricator
403 phprecset = set(getnode(d) for d in diffs)
404 phprecset = set(getnode(d) for d in diffs)
404
405
405 # Ignore if precursors (Phabricator and local repo) do not overlap,
406 # Ignore if precursors (Phabricator and local repo) do not overlap,
406 # and force is not set (when commit message says nothing)
407 # and force is not set (when commit message says nothing)
407 if not force and not bool(phprecset & precset):
408 if not force and not bool(phprecset & precset):
408 tagname = b'D%d' % drev
409 tagname = b'D%d' % drev
409 tags.tag(
410 tags.tag(
410 repo,
411 repo,
411 tagname,
412 tagname,
412 nullid,
413 nullid,
413 message=None,
414 message=None,
414 user=None,
415 user=None,
415 date=None,
416 date=None,
416 local=True,
417 local=True,
417 )
418 )
418 unfi.ui.warn(
419 unfi.ui.warn(
419 _(
420 _(
420 b'D%s: local tag removed - does not match '
421 b'D%s: local tag removed - does not match '
421 b'Differential history\n'
422 b'Differential history\n'
422 )
423 )
423 % drev
424 % drev
424 )
425 )
425 continue
426 continue
426
427
427 # Find the last node using Phabricator metadata, and make sure it
428 # Find the last node using Phabricator metadata, and make sure it
428 # exists in the repo
429 # exists in the repo
429 oldnode = lastdiff = None
430 oldnode = lastdiff = None
430 if diffs:
431 if diffs:
431 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
432 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
432 oldnode = getnode(lastdiff)
433 oldnode = getnode(lastdiff)
433 if oldnode and oldnode not in nodemap:
434 if oldnode and oldnode not in nodemap:
434 oldnode = None
435 oldnode = None
435
436
436 result[newnode] = (oldnode, lastdiff, drev)
437 result[newnode] = (oldnode, lastdiff, drev)
437
438
438 return result
439 return result
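
# Illustration (not part of the diff): the shape of the mapping returned above,
# with placeholder 20-byte nodes.  oldnode and lastdiff may be None when only
# the Differential Revision ID could be recovered.
newnode = b'\x73\xd4\xbc\x60' + b'\x00' * 16
oldnode = b'\xaa\xbb\xcc\xdd' + b'\x00' * 16
lastdiff = {b'id': b'4', b'revisionID': b'2'}  # trimmed differential.querydiffs entry
oldmap = {newnode: (oldnode, lastdiff, 2)}     # {node: (oldnode, lastdiff, drev)}
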
439
440
440
441
441 def getdiff(ctx, diffopts):
442 def getdiff(ctx, diffopts):
442 """plain-text diff without header (user, commit message, etc)"""
443 """plain-text diff without header (user, commit message, etc)"""
443 output = util.stringio()
444 output = util.stringio()
444 for chunk, _label in patch.diffui(
445 for chunk, _label in patch.diffui(
445 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
446 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
446 ):
447 ):
447 output.write(chunk)
448 output.write(chunk)
448 return output.getvalue()
449 return output.getvalue()
449
450
450
451
451 class DiffChangeType(object):
452 class DiffChangeType(object):
452 ADD = 1
453 ADD = 1
453 CHANGE = 2
454 CHANGE = 2
454 DELETE = 3
455 DELETE = 3
455 MOVE_AWAY = 4
456 MOVE_AWAY = 4
456 COPY_AWAY = 5
457 COPY_AWAY = 5
457 MOVE_HERE = 6
458 MOVE_HERE = 6
458 COPY_HERE = 7
459 COPY_HERE = 7
459 MULTICOPY = 8
460 MULTICOPY = 8
460
461
461
462
462 class DiffFileType(object):
463 class DiffFileType(object):
463 TEXT = 1
464 TEXT = 1
464 IMAGE = 2
465 IMAGE = 2
465 BINARY = 3
466 BINARY = 3
466
467
467
468
469 @attr.s
470 class phabhunk(dict):
471 """Represents a Differential hunk, which is owned by a Differential change
472 """
473
474 oldOffset = attr.ib(default=0) # camelcase-required
475 oldLength = attr.ib(default=0) # camelcase-required
476 newOffset = attr.ib(default=0) # camelcase-required
477 newLength = attr.ib(default=0) # camelcase-required
478 corpus = attr.ib(default='')
479 # These get added to the phabchange's equivalents
480 addLines = attr.ib(default=0) # camelcase-required
481 delLines = attr.ib(default=0) # camelcase-required
482
483
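
# Illustration (not part of the diff): how the new phabhunk structure might be
# filled in for a one-line change.  This standalone copy only mirrors the class
# above (it needs the third-party attrs package, bundled as
# mercurial.thirdparty.attr); the offsets and corpus are invented.
import attr

@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    addLines = attr.ib(default=0)   # camelcase-required
    delLines = attr.ib(default=0)   # camelcase-required

hunk = phabhunk(
    oldOffset=10, oldLength=1,
    newOffset=10, newLength=1,
    corpus='-old line\n+new line\n',
    addLines=1, delLines=1,
)
# attr.asdict() keeps the camelCase keys the Conduit side expects.
print(attr.asdict(hunk))
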
468 def creatediff(ctx):
484 def creatediff(ctx):
469 """create a Differential Diff"""
485 """create a Differential Diff"""
470 repo = ctx.repo()
486 repo = ctx.repo()
471 repophid = getrepophid(repo)
487 repophid = getrepophid(repo)
472 # Create a "Differential Diff" via "differential.createrawdiff" API
488 # Create a "Differential Diff" via "differential.createrawdiff" API
473 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
489 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
474 if repophid:
490 if repophid:
475 params[b'repositoryPHID'] = repophid
491 params[b'repositoryPHID'] = repophid
476 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
492 diff = callconduit(repo.ui, b'differential.createrawdiff', params)
477 if not diff:
493 if not diff:
478 raise error.Abort(_(b'cannot create diff for %s') % ctx)
494 raise error.Abort(_(b'cannot create diff for %s') % ctx)
479 return diff
495 return diff
480
496
481
497
482 def writediffproperties(ctx, diff):
498 def writediffproperties(ctx, diff):
483 """write metadata to diff so patches could be applied losslessly"""
499 """write metadata to diff so patches could be applied losslessly"""
484 params = {
500 params = {
485 b'diff_id': diff[b'id'],
501 b'diff_id': diff[b'id'],
486 b'name': b'hg:meta',
502 b'name': b'hg:meta',
487 b'data': templatefilters.json(
503 b'data': templatefilters.json(
488 {
504 {
489 b'user': ctx.user(),
505 b'user': ctx.user(),
490 b'date': b'%d %d' % ctx.date(),
506 b'date': b'%d %d' % ctx.date(),
491 b'branch': ctx.branch(),
507 b'branch': ctx.branch(),
492 b'node': ctx.hex(),
508 b'node': ctx.hex(),
493 b'parent': ctx.p1().hex(),
509 b'parent': ctx.p1().hex(),
494 }
510 }
495 ),
511 ),
496 }
512 }
497 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
513 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
498
514
499 params = {
515 params = {
500 b'diff_id': diff[b'id'],
516 b'diff_id': diff[b'id'],
501 b'name': b'local:commits',
517 b'name': b'local:commits',
502 b'data': templatefilters.json(
518 b'data': templatefilters.json(
503 {
519 {
504 ctx.hex(): {
520 ctx.hex(): {
505 b'author': stringutil.person(ctx.user()),
521 b'author': stringutil.person(ctx.user()),
506 b'authorEmail': stringutil.email(ctx.user()),
522 b'authorEmail': stringutil.email(ctx.user()),
507 b'time': int(ctx.date()[0]),
523 b'time': int(ctx.date()[0]),
508 b'commit': ctx.hex(),
524 b'commit': ctx.hex(),
509 b'parents': [ctx.p1().hex()],
525 b'parents': [ctx.p1().hex()],
510 b'branch': ctx.branch(),
526 b'branch': ctx.branch(),
511 },
527 },
512 }
528 }
513 ),
529 ),
514 }
530 }
515 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
531 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
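
# Illustration (not part of the diff): the two property payloads that
# writediffproperties() attaches to a diff, with placeholder ids and hashes.
# In the real code the 'data' values are serialized by templatefilters.json().
node = '73d4bc60' + '0' * 32    # placeholder 40-char node
parent = 'aaaabbbb' + '0' * 32  # placeholder parent node

hg_meta = {
    'diff_id': 1234,
    'name': 'hg:meta',
    'data': {
        'user': 'An Author <author@example.com>',
        'date': '1570000000 0',
        'branch': 'default',
        'node': node,
        'parent': parent,
    },
}

local_commits = {
    'diff_id': 1234,
    'name': 'local:commits',
    'data': {
        node: {
            'author': 'An Author',
            'authorEmail': 'author@example.com',
            'time': 1570000000,
            'commit': node,
            'parents': [parent],
            'branch': 'default',
        },
    },
}
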
516
532
517
533
518 def createdifferentialrevision(
534 def createdifferentialrevision(
519 ctx,
535 ctx,
520 revid=None,
536 revid=None,
521 parentrevphid=None,
537 parentrevphid=None,
522 oldnode=None,
538 oldnode=None,
523 olddiff=None,
539 olddiff=None,
524 actions=None,
540 actions=None,
525 comment=None,
541 comment=None,
526 ):
542 ):
527 """create or update a Differential Revision
543 """create or update a Differential Revision
528
544
529 If revid is None, create a new Differential Revision, otherwise update
545 If revid is None, create a new Differential Revision, otherwise update
530 revid. If parentrevphid is not None, set it as a dependency.
546 revid. If parentrevphid is not None, set it as a dependency.
531
547
532 If oldnode is not None, check if the patch content (without commit message
548 If oldnode is not None, check if the patch content (without commit message
533 and metadata) has changed before creating another diff.
549 and metadata) has changed before creating another diff.
534
550
535 If actions is not None, they will be appended to the transaction.
551 If actions is not None, they will be appended to the transaction.
536 """
552 """
537 repo = ctx.repo()
553 repo = ctx.repo()
538 if oldnode:
554 if oldnode:
539 diffopts = mdiff.diffopts(git=True, context=32767)
555 diffopts = mdiff.diffopts(git=True, context=32767)
540 oldctx = repo.unfiltered()[oldnode]
556 oldctx = repo.unfiltered()[oldnode]
541 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
557 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
542 else:
558 else:
543 neednewdiff = True
559 neednewdiff = True
544
560
545 transactions = []
561 transactions = []
546 if neednewdiff:
562 if neednewdiff:
547 diff = creatediff(ctx)
563 diff = creatediff(ctx)
548 transactions.append({b'type': b'update', b'value': diff[b'phid']})
564 transactions.append({b'type': b'update', b'value': diff[b'phid']})
549 if comment:
565 if comment:
550 transactions.append({b'type': b'comment', b'value': comment})
566 transactions.append({b'type': b'comment', b'value': comment})
551 else:
567 else:
552 # Even if we don't need to upload a new diff because the patch content
568 # Even if we don't need to upload a new diff because the patch content
553 # does not change, we might still need to update its metadata so
569 # does not change, we might still need to update its metadata so
554 # pushers could know the correct node metadata.
570 # pushers could know the correct node metadata.
555 assert olddiff
571 assert olddiff
556 diff = olddiff
572 diff = olddiff
557 writediffproperties(ctx, diff)
573 writediffproperties(ctx, diff)
558
574
559 # Set the parent Revision every time, so commit re-ordering is picked-up
575 # Set the parent Revision every time, so commit re-ordering is picked-up
560 if parentrevphid:
576 if parentrevphid:
561 transactions.append(
577 transactions.append(
562 {b'type': b'parents.set', b'value': [parentrevphid]}
578 {b'type': b'parents.set', b'value': [parentrevphid]}
563 )
579 )
564
580
565 if actions:
581 if actions:
566 transactions += actions
582 transactions += actions
567
583
568 # Parse commit message and update related fields.
584 # Parse commit message and update related fields.
569 desc = ctx.description()
585 desc = ctx.description()
570 info = callconduit(
586 info = callconduit(
571 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
587 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
572 )
588 )
573 for k, v in info[b'fields'].items():
589 for k, v in info[b'fields'].items():
574 if k in [b'title', b'summary', b'testPlan']:
590 if k in [b'title', b'summary', b'testPlan']:
575 transactions.append({b'type': k, b'value': v})
591 transactions.append({b'type': k, b'value': v})
576
592
577 params = {b'transactions': transactions}
593 params = {b'transactions': transactions}
578 if revid is not None:
594 if revid is not None:
579 # Update an existing Differential Revision
595 # Update an existing Differential Revision
580 params[b'objectIdentifier'] = revid
596 params[b'objectIdentifier'] = revid
581
597
582 revision = callconduit(repo.ui, b'differential.revision.edit', params)
598 revision = callconduit(repo.ui, b'differential.revision.edit', params)
583 if not revision:
599 if not revision:
584 raise error.Abort(_(b'cannot create revision for %s') % ctx)
600 raise error.Abort(_(b'cannot create revision for %s') % ctx)
585
601
586 return revision, diff
602 return revision, diff
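
# Illustration (not part of the diff): the kind of transaction list that
# createdifferentialrevision() sends to differential.revision.edit.  All PHIDs
# and text are placeholders (the PHIDs reuse examples from the querydrev
# docstring further down).
transactions = [
    {'type': 'update', 'value': 'PHID-DIFF-xoqnjkobbm6k4dk6hi72'},    # new diff
    {'type': 'comment', 'value': 'rebased onto latest default'},      # --comment
    {'type': 'parents.set', 'value': ['PHID-DREV-gbapp366kutjebt7agcd']},
    {'type': 'reviewers.add', 'value': ['PHID-USER-tv3ohwc4v4jeu34otlye']},
    # fields parsed out of the commit message by differential.parsecommitmessage:
    {'type': 'title', 'value': 'phabricator: add the phabhunk data structure'},
    {'type': 'summary', 'value': '...'},
    {'type': 'testPlan', 'value': '...'},
]

params = {'transactions': transactions}
params['objectIdentifier'] = 2  # only present when updating an existing D2
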
587
603
588
604
589 def userphids(repo, names):
605 def userphids(repo, names):
590 """convert user names to PHIDs"""
606 """convert user names to PHIDs"""
591 names = [name.lower() for name in names]
607 names = [name.lower() for name in names]
592 query = {b'constraints': {b'usernames': names}}
608 query = {b'constraints': {b'usernames': names}}
593 result = callconduit(repo.ui, b'user.search', query)
609 result = callconduit(repo.ui, b'user.search', query)
594 # username not found is not an error of the API. So check if we have missed
610 # username not found is not an error of the API. So check if we have missed
595 # some names here.
611 # some names here.
596 data = result[b'data']
612 data = result[b'data']
597 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
613 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
598 unresolved = set(names) - resolved
614 unresolved = set(names) - resolved
599 if unresolved:
615 if unresolved:
600 raise error.Abort(
616 raise error.Abort(
601 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
617 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
602 )
618 )
603 return [entry[b'phid'] for entry in data]
619 return [entry[b'phid'] for entry in data]
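
# Illustration (not part of the diff): the user.search exchange behind
# userphids(), with placeholder data.  An unknown username is simply absent
# from 'data' rather than an API error, hence the resolved/unresolved check.
query = {'constraints': {'usernames': ['alice', 'bob']}}
result = {'data': [
    {'phid': 'PHID-USER-tv3ohwc4v4jeu34otlye', 'fields': {'username': 'alice'}},
]}
resolved = {e['fields']['username'].lower() for e in result['data']}
unresolved = {'alice', 'bob'} - resolved  # -> {'bob'}: would trigger the abort
phids = [e['phid'] for e in result['data']]
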
604
620
605
621
606 @vcrcommand(
622 @vcrcommand(
607 b'phabsend',
623 b'phabsend',
608 [
624 [
609 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
625 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
610 (b'', b'amend', True, _(b'update commit messages')),
626 (b'', b'amend', True, _(b'update commit messages')),
611 (b'', b'reviewer', [], _(b'specify reviewers')),
627 (b'', b'reviewer', [], _(b'specify reviewers')),
612 (b'', b'blocker', [], _(b'specify blocking reviewers')),
628 (b'', b'blocker', [], _(b'specify blocking reviewers')),
613 (
629 (
614 b'm',
630 b'm',
615 b'comment',
631 b'comment',
616 b'',
632 b'',
617 _(b'add a comment to Revisions with new/updated Diffs'),
633 _(b'add a comment to Revisions with new/updated Diffs'),
618 ),
634 ),
619 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
635 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
620 ],
636 ],
621 _(b'REV [OPTIONS]'),
637 _(b'REV [OPTIONS]'),
622 helpcategory=command.CATEGORY_IMPORT_EXPORT,
638 helpcategory=command.CATEGORY_IMPORT_EXPORT,
623 )
639 )
624 def phabsend(ui, repo, *revs, **opts):
640 def phabsend(ui, repo, *revs, **opts):
625 """upload changesets to Phabricator
641 """upload changesets to Phabricator
626
642
627 If there are multiple revisions specified, they will be sent as a stack
643 If there are multiple revisions specified, they will be sent as a stack
628 with a linear dependency relationship using the order specified by the
644 with a linear dependency relationship using the order specified by the
629 revset.
645 revset.
630
646
631 The first time changesets are uploaded, local tags will be created to
647 The first time changesets are uploaded, local tags will be created to
632 maintain the association. After the first time, phabsend will check
648 maintain the association. After the first time, phabsend will check
633 obsstore and tags information so it can figure out whether to update an
649 obsstore and tags information so it can figure out whether to update an
634 existing Differential Revision, or create a new one.
650 existing Differential Revision, or create a new one.
635
651
636 If --amend is set, update commit messages so they have the
652 If --amend is set, update commit messages so they have the
637 ``Differential Revision`` URL and remove related tags. This is similar to what
653 ``Differential Revision`` URL and remove related tags. This is similar to what
638 arcanist does, and is preferred in author-push workflows. Otherwise,
654 arcanist does, and is preferred in author-push workflows. Otherwise,
639 use local tags to record the ``Differential Revision`` association.
655 use local tags to record the ``Differential Revision`` association.
640
656
641 The --confirm option lets you confirm changesets before sending them. You
657 The --confirm option lets you confirm changesets before sending them. You
642 can also add the following to your configuration file to make it the default
658 can also add the following to your configuration file to make it the default
643 behaviour::
659 behaviour::
644
660
645 [phabsend]
661 [phabsend]
646 confirm = true
662 confirm = true
647
663
648 phabsend will check obsstore and the above association to decide whether to
664 phabsend will check obsstore and the above association to decide whether to
649 update an existing Differential Revision, or create a new one.
665 update an existing Differential Revision, or create a new one.
650 """
666 """
651 opts = pycompat.byteskwargs(opts)
667 opts = pycompat.byteskwargs(opts)
652 revs = list(revs) + opts.get(b'rev', [])
668 revs = list(revs) + opts.get(b'rev', [])
653 revs = scmutil.revrange(repo, revs)
669 revs = scmutil.revrange(repo, revs)
654
670
655 if not revs:
671 if not revs:
656 raise error.Abort(_(b'phabsend requires at least one changeset'))
672 raise error.Abort(_(b'phabsend requires at least one changeset'))
657 if opts.get(b'amend'):
673 if opts.get(b'amend'):
658 cmdutil.checkunfinished(repo)
674 cmdutil.checkunfinished(repo)
659
675
660 # {newnode: (oldnode, olddiff, olddrev)}
676 # {newnode: (oldnode, olddiff, olddrev)}
661 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
677 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
662
678
663 confirm = ui.configbool(b'phabsend', b'confirm')
679 confirm = ui.configbool(b'phabsend', b'confirm')
664 confirm |= bool(opts.get(b'confirm'))
680 confirm |= bool(opts.get(b'confirm'))
665 if confirm:
681 if confirm:
666 confirmed = _confirmbeforesend(repo, revs, oldmap)
682 confirmed = _confirmbeforesend(repo, revs, oldmap)
667 if not confirmed:
683 if not confirmed:
668 raise error.Abort(_(b'phabsend cancelled'))
684 raise error.Abort(_(b'phabsend cancelled'))
669
685
670 actions = []
686 actions = []
671 reviewers = opts.get(b'reviewer', [])
687 reviewers = opts.get(b'reviewer', [])
672 blockers = opts.get(b'blocker', [])
688 blockers = opts.get(b'blocker', [])
673 phids = []
689 phids = []
674 if reviewers:
690 if reviewers:
675 phids.extend(userphids(repo, reviewers))
691 phids.extend(userphids(repo, reviewers))
676 if blockers:
692 if blockers:
677 phids.extend(
693 phids.extend(
678 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
694 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
679 )
695 )
680 if phids:
696 if phids:
681 actions.append({b'type': b'reviewers.add', b'value': phids})
697 actions.append({b'type': b'reviewers.add', b'value': phids})
682
698
683 drevids = [] # [int]
699 drevids = [] # [int]
684 diffmap = {} # {newnode: diff}
700 diffmap = {} # {newnode: diff}
685
701
686 # Send patches one by one so we know their Differential Revision PHIDs and
702 # Send patches one by one so we know their Differential Revision PHIDs and
687 # can provide dependency relationship
703 # can provide dependency relationship
688 lastrevphid = None
704 lastrevphid = None
689 for rev in revs:
705 for rev in revs:
690 ui.debug(b'sending rev %d\n' % rev)
706 ui.debug(b'sending rev %d\n' % rev)
691 ctx = repo[rev]
707 ctx = repo[rev]
692
708
693 # Get Differential Revision ID
709 # Get Differential Revision ID
694 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
710 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
695 if oldnode != ctx.node() or opts.get(b'amend'):
711 if oldnode != ctx.node() or opts.get(b'amend'):
696 # Create or update Differential Revision
712 # Create or update Differential Revision
697 revision, diff = createdifferentialrevision(
713 revision, diff = createdifferentialrevision(
698 ctx,
714 ctx,
699 revid,
715 revid,
700 lastrevphid,
716 lastrevphid,
701 oldnode,
717 oldnode,
702 olddiff,
718 olddiff,
703 actions,
719 actions,
704 opts.get(b'comment'),
720 opts.get(b'comment'),
705 )
721 )
706 diffmap[ctx.node()] = diff
722 diffmap[ctx.node()] = diff
707 newrevid = int(revision[b'object'][b'id'])
723 newrevid = int(revision[b'object'][b'id'])
708 newrevphid = revision[b'object'][b'phid']
724 newrevphid = revision[b'object'][b'phid']
709 if revid:
725 if revid:
710 action = b'updated'
726 action = b'updated'
711 else:
727 else:
712 action = b'created'
728 action = b'created'
713
729
714 # Create a local tag to note the association, if commit message
730 # Create a local tag to note the association, if commit message
715 # does not have it already
731 # does not have it already
716 m = _differentialrevisiondescre.search(ctx.description())
732 m = _differentialrevisiondescre.search(ctx.description())
717 if not m or int(m.group(r'id')) != newrevid:
733 if not m or int(m.group(r'id')) != newrevid:
718 tagname = b'D%d' % newrevid
734 tagname = b'D%d' % newrevid
719 tags.tag(
735 tags.tag(
720 repo,
736 repo,
721 tagname,
737 tagname,
722 ctx.node(),
738 ctx.node(),
723 message=None,
739 message=None,
724 user=None,
740 user=None,
725 date=None,
741 date=None,
726 local=True,
742 local=True,
727 )
743 )
728 else:
744 else:
729 # Nothing changed. But still set "newrevphid" so the next revision
745 # Nothing changed. But still set "newrevphid" so the next revision
730 # could depend on this one and "newrevid" for the summary line.
746 # could depend on this one and "newrevid" for the summary line.
731 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
747 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
732 newrevid = revid
748 newrevid = revid
733 action = b'skipped'
749 action = b'skipped'
734
750
735 actiondesc = ui.label(
751 actiondesc = ui.label(
736 {
752 {
737 b'created': _(b'created'),
753 b'created': _(b'created'),
738 b'skipped': _(b'skipped'),
754 b'skipped': _(b'skipped'),
739 b'updated': _(b'updated'),
755 b'updated': _(b'updated'),
740 }[action],
756 }[action],
741 b'phabricator.action.%s' % action,
757 b'phabricator.action.%s' % action,
742 )
758 )
743 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
759 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
744 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
760 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
745 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
761 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
746 ui.write(
762 ui.write(
747 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
763 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
748 )
764 )
749 drevids.append(newrevid)
765 drevids.append(newrevid)
750 lastrevphid = newrevphid
766 lastrevphid = newrevphid
751
767
752 # Update commit messages and remove tags
768 # Update commit messages and remove tags
753 if opts.get(b'amend'):
769 if opts.get(b'amend'):
754 unfi = repo.unfiltered()
770 unfi = repo.unfiltered()
755 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
771 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
756 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
772 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
757 wnode = unfi[b'.'].node()
773 wnode = unfi[b'.'].node()
758 mapping = {} # {oldnode: [newnode]}
774 mapping = {} # {oldnode: [newnode]}
759 for i, rev in enumerate(revs):
775 for i, rev in enumerate(revs):
760 old = unfi[rev]
776 old = unfi[rev]
761 drevid = drevids[i]
777 drevid = drevids[i]
762 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
778 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
763 newdesc = getdescfromdrev(drev)
779 newdesc = getdescfromdrev(drev)
764 # Make sure commit message contain "Differential Revision"
780 # Make sure commit message contain "Differential Revision"
765 if old.description() != newdesc:
781 if old.description() != newdesc:
766 if old.phase() == phases.public:
782 if old.phase() == phases.public:
767 ui.warn(
783 ui.warn(
768 _(b"warning: not updating public commit %s\n")
784 _(b"warning: not updating public commit %s\n")
769 % scmutil.formatchangeid(old)
785 % scmutil.formatchangeid(old)
770 )
786 )
771 continue
787 continue
772 parents = [
788 parents = [
773 mapping.get(old.p1().node(), (old.p1(),))[0],
789 mapping.get(old.p1().node(), (old.p1(),))[0],
774 mapping.get(old.p2().node(), (old.p2(),))[0],
790 mapping.get(old.p2().node(), (old.p2(),))[0],
775 ]
791 ]
776 new = context.metadataonlyctx(
792 new = context.metadataonlyctx(
777 repo,
793 repo,
778 old,
794 old,
779 parents=parents,
795 parents=parents,
780 text=newdesc,
796 text=newdesc,
781 user=old.user(),
797 user=old.user(),
782 date=old.date(),
798 date=old.date(),
783 extra=old.extra(),
799 extra=old.extra(),
784 )
800 )
785
801
786 newnode = new.commit()
802 newnode = new.commit()
787
803
788 mapping[old.node()] = [newnode]
804 mapping[old.node()] = [newnode]
789 # Update diff property
805 # Update diff property
790 # If it fails just warn and keep going, otherwise the DREV
806 # If it fails just warn and keep going, otherwise the DREV
791 # associations will be lost
807 # associations will be lost
792 try:
808 try:
793 writediffproperties(unfi[newnode], diffmap[old.node()])
809 writediffproperties(unfi[newnode], diffmap[old.node()])
794 except util.urlerr.urlerror:
810 except util.urlerr.urlerror:
795 ui.warnnoi18n(
811 ui.warnnoi18n(
796 b'Failed to update metadata for D%s\n' % drevid
812 b'Failed to update metadata for D%s\n' % drevid
797 )
813 )
798 # Remove local tags since it's no longer necessary
814 # Remove local tags since it's no longer necessary
799 tagname = b'D%d' % drevid
815 tagname = b'D%d' % drevid
800 if tagname in repo.tags():
816 if tagname in repo.tags():
801 tags.tag(
817 tags.tag(
802 repo,
818 repo,
803 tagname,
819 tagname,
804 nullid,
820 nullid,
805 message=None,
821 message=None,
806 user=None,
822 user=None,
807 date=None,
823 date=None,
808 local=True,
824 local=True,
809 )
825 )
810 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
826 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
811 if wnode in mapping:
827 if wnode in mapping:
812 unfi.setparents(mapping[wnode][0])
828 unfi.setparents(mapping[wnode][0])
813
829
814
830
815 # Map from "hg:meta" keys to header understood by "hg import". The order is
831 # Map from "hg:meta" keys to header understood by "hg import". The order is
816 # consistent with "hg export" output.
832 # consistent with "hg export" output.
817 _metanamemap = util.sortdict(
833 _metanamemap = util.sortdict(
818 [
834 [
819 (b'user', b'User'),
835 (b'user', b'User'),
820 (b'date', b'Date'),
836 (b'date', b'Date'),
821 (b'branch', b'Branch'),
837 (b'branch', b'Branch'),
822 (b'node', b'Node ID'),
838 (b'node', b'Node ID'),
823 (b'parent', b'Parent '),
839 (b'parent', b'Parent '),
824 ]
840 ]
825 )
841 )
826
842
827
843
828 def _confirmbeforesend(repo, revs, oldmap):
844 def _confirmbeforesend(repo, revs, oldmap):
829 url, token = readurltoken(repo.ui)
845 url, token = readurltoken(repo.ui)
830 ui = repo.ui
846 ui = repo.ui
831 for rev in revs:
847 for rev in revs:
832 ctx = repo[rev]
848 ctx = repo[rev]
833 desc = ctx.description().splitlines()[0]
849 desc = ctx.description().splitlines()[0]
834 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
850 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
835 if drevid:
851 if drevid:
836 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
852 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
837 else:
853 else:
838 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
854 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
839
855
840 ui.write(
856 ui.write(
841 _(b'%s - %s: %s\n')
857 _(b'%s - %s: %s\n')
842 % (
858 % (
843 drevdesc,
859 drevdesc,
844 ui.label(bytes(ctx), b'phabricator.node'),
860 ui.label(bytes(ctx), b'phabricator.node'),
845 ui.label(desc, b'phabricator.desc'),
861 ui.label(desc, b'phabricator.desc'),
846 )
862 )
847 )
863 )
848
864
849 if ui.promptchoice(
865 if ui.promptchoice(
850 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
866 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
851 ):
867 ):
852 return False
868 return False
853
869
854 return True
870 return True
855
871
856
872
857 _knownstatusnames = {
873 _knownstatusnames = {
858 b'accepted',
874 b'accepted',
859 b'needsreview',
875 b'needsreview',
860 b'needsrevision',
876 b'needsrevision',
861 b'closed',
877 b'closed',
862 b'abandoned',
878 b'abandoned',
863 }
879 }
864
880
865
881
866 def _getstatusname(drev):
882 def _getstatusname(drev):
867 """get normalized status name from a Differential Revision"""
883 """get normalized status name from a Differential Revision"""
868 return drev[b'statusName'].replace(b' ', b'').lower()
884 return drev[b'statusName'].replace(b' ', b'').lower()
869
885
870
886
871 # Small language to specify differential revisions. Support symbols: (), :X,
887 # Small language to specify differential revisions. Support symbols: (), :X,
872 # +, and -.
888 # +, and -.
873
889
874 _elements = {
890 _elements = {
875 # token-type: binding-strength, primary, prefix, infix, suffix
891 # token-type: binding-strength, primary, prefix, infix, suffix
876 b'(': (12, None, (b'group', 1, b')'), None, None),
892 b'(': (12, None, (b'group', 1, b')'), None, None),
877 b':': (8, None, (b'ancestors', 8), None, None),
893 b':': (8, None, (b'ancestors', 8), None, None),
878 b'&': (5, None, None, (b'and_', 5), None),
894 b'&': (5, None, None, (b'and_', 5), None),
879 b'+': (4, None, None, (b'add', 4), None),
895 b'+': (4, None, None, (b'add', 4), None),
880 b'-': (4, None, None, (b'sub', 4), None),
896 b'-': (4, None, None, (b'sub', 4), None),
881 b')': (0, None, None, None, None),
897 b')': (0, None, None, None, None),
882 b'symbol': (0, b'symbol', None, None, None),
898 b'symbol': (0, b'symbol', None, None, None),
883 b'end': (0, None, None, None, None),
899 b'end': (0, None, None, None, None),
884 }
900 }
885
901
886
902
887 def _tokenize(text):
903 def _tokenize(text):
888 view = memoryview(text) # zero-copy slice
904 view = memoryview(text) # zero-copy slice
889 special = b'():+-& '
905 special = b'():+-& '
890 pos = 0
906 pos = 0
891 length = len(text)
907 length = len(text)
892 while pos < length:
908 while pos < length:
893 symbol = b''.join(
909 symbol = b''.join(
894 itertools.takewhile(
910 itertools.takewhile(
895 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
911 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
896 )
912 )
897 )
913 )
898 if symbol:
914 if symbol:
899 yield (b'symbol', symbol, pos)
915 yield (b'symbol', symbol, pos)
900 pos += len(symbol)
916 pos += len(symbol)
901 else: # special char, ignore space
917 else: # special char, ignore space
902 if text[pos] != b' ':
918 if text[pos] != b' ':
903 yield (text[pos], None, pos)
919 yield (text[pos], None, pos)
904 pos += 1
920 pos += 1
905 yield (b'end', None, pos)
921 yield (b'end', None, pos)
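
# Illustration (not part of the diff): the token stream the spec language
# produces for a typical phabread-style spec, checked with a small standalone
# re-implementation of the loop above (bytes handling simplified to str).
import itertools

def tokenize(text, special='():+-& '):
    pos = 0
    while pos < len(text):
        symbol = ''.join(itertools.takewhile(
            lambda ch: ch not in special, text[pos:]))
        if symbol:
            yield ('symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos] != ' ':
                yield (text[pos], None, pos)
            pos += 1
    yield ('end', None, pos)

# ":D4 + D7" means "ancestors of D4, plus D7":
print(list(tokenize(':D4 + D7')))
# [(':', None, 0), ('symbol', 'D4', 1), ('+', None, 4),
#  ('symbol', 'D7', 6), ('end', None, 8)]
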
906
922
907
923
908 def _parse(text):
924 def _parse(text):
909 tree, pos = parser.parser(_elements).parse(_tokenize(text))
925 tree, pos = parser.parser(_elements).parse(_tokenize(text))
910 if pos != len(text):
926 if pos != len(text):
911 raise error.ParseError(b'invalid token', pos)
927 raise error.ParseError(b'invalid token', pos)
912 return tree
928 return tree
913
929
914
930
915 def _parsedrev(symbol):
931 def _parsedrev(symbol):
916 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
932 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
917 if symbol.startswith(b'D') and symbol[1:].isdigit():
933 if symbol.startswith(b'D') and symbol[1:].isdigit():
918 return int(symbol[1:])
934 return int(symbol[1:])
919 if symbol.isdigit():
935 if symbol.isdigit():
920 return int(symbol)
936 return int(symbol)
921
937
922
938
923 def _prefetchdrevs(tree):
939 def _prefetchdrevs(tree):
924 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
940 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
925 drevs = set()
941 drevs = set()
926 ancestordrevs = set()
942 ancestordrevs = set()
927 op = tree[0]
943 op = tree[0]
928 if op == b'symbol':
944 if op == b'symbol':
929 r = _parsedrev(tree[1])
945 r = _parsedrev(tree[1])
930 if r:
946 if r:
931 drevs.add(r)
947 drevs.add(r)
932 elif op == b'ancestors':
948 elif op == b'ancestors':
933 r, a = _prefetchdrevs(tree[1])
949 r, a = _prefetchdrevs(tree[1])
934 drevs.update(r)
950 drevs.update(r)
935 ancestordrevs.update(r)
951 ancestordrevs.update(r)
936 ancestordrevs.update(a)
952 ancestordrevs.update(a)
937 else:
953 else:
938 for t in tree[1:]:
954 for t in tree[1:]:
939 r, a = _prefetchdrevs(t)
955 r, a = _prefetchdrevs(t)
940 drevs.update(r)
956 drevs.update(r)
941 ancestordrevs.update(a)
957 ancestordrevs.update(a)
942 return drevs, ancestordrevs
958 return drevs, ancestordrevs
943
959
944
960
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
                "3",
                "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
                "phabricator:projects": [],
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]


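# Illustrative sketch (hypothetical helper, not part of the extension): how a
# caller might evaluate a spec and print one line per returned revision dict,
# using the field names from the sample dict in the docstring above.
def _example_listdrevs(repo, spec=b':D6+8-(2+D4)'):
    for drev in querydrev(repo, spec):
        repo.ui.write(
            b'D%s %s %s\n' % (drev[b'id'], drev[b'statusName'], drev[b'title'])
        )

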
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))


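# Minimal runnable sketch (sample values, not real conduit output): empty
# fields are filtered out and the remaining ones are joined with blank lines.
def _example_getdescfromdrev():
    sampledrev = {
        b'title': b'example',
        b'summary': b'add a feature',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D2',
    }
    return getdescfromdrev(sampledrev)
    # expected:
    # b'example\n\nadd a feature\n\nDifferential Revision: https://phab.example.com/D2'

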
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
            "hg:meta": {
                "date": "1499571514 25200",
                "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
                "user": "Foo Bar <foo@example.com>",
                "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
            }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
            "local:commits": {
                "98c08acae292b2faf60a279b4189beb6cff1414d": {
                    "author": "Foo Bar",
                    "time": 1499546314,
                    "branch": "default",
                    "tag": "",
                    "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
                    "local": "1000",
                    "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
                    "summary": "...",
                    "message": "...",
                    "authorEmail": "foo@example.com"
                }
            }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta


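# Minimal runnable sketch (sample values mirroring the docstring above): an
# arc-style "local:commits" property converted into hg:meta-style metadata.
# A single commit is used, so sorted() never has to compare dicts.
def _example_getdiffmeta():
    samplediff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'time': 1499546314,
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                },
            },
        },
    }
    return getdiffmeta(samplediff)
    # expected keys: user, date (b'1499546314 0', time zone lost), branch,
    # node and parent

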
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        write(content)


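# The text written for each revision therefore has roughly this shape (the
# exact "# ..." metadata lines depend on _metanamemap and on which fields
# getdiffmeta() recovered; the values here are the docstring samples):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16
#   <commit message assembled by getdescfromdrev()>
#   <raw diff returned by differential.getrawdiff>

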
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)


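# Example invocations (illustrative; they require a configured
# phabricator.url and API token):
#
#   $ hg phabread D123 > D123.patch            # a single revision
#   $ hg phabread --stack D123 | hg import -   # the whole dependency stack
#   $ hg phabread ':D9 & needsreview'          # only "Needs Review" ones

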
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)


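# Example invocations (illustrative):
#
#   $ hg phabupdate --accept D123 -m 'LGTM'   # accept, commenting on it
#   $ hg phabupdate --abandon ':D123'         # abandon a whole stack

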
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t,})
    return None


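# Example template usage (illustrative; the keyword exposes ``url`` and
# ``id`` as documented above):
#
#   $ hg log -r . -T '{if(phabreview, "{phabreview.id} {phabreview.url}\n")}'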