##// END OF EJS Templates
phabricator: add the DiffChangeType and DiffFileType constants...
Ian Moody -
r43452:a66e2844 default
parent child Browse files
Show More
@@ -1,1259 +1,1276
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial.pycompat import getattr
52 from mercurial.pycompat import getattr
53 from mercurial import (
53 from mercurial import (
54 cmdutil,
54 cmdutil,
55 context,
55 context,
56 encoding,
56 encoding,
57 error,
57 error,
58 exthelper,
58 exthelper,
59 httpconnection as httpconnectionmod,
59 httpconnection as httpconnectionmod,
60 mdiff,
60 mdiff,
61 obsutil,
61 obsutil,
62 parser,
62 parser,
63 patch,
63 patch,
64 phases,
64 phases,
65 pycompat,
65 pycompat,
66 scmutil,
66 scmutil,
67 smartset,
67 smartset,
68 tags,
68 tags,
69 templatefilters,
69 templatefilters,
70 templateutil,
70 templateutil,
71 url as urlmod,
71 url as urlmod,
72 util,
72 util,
73 )
73 )
74 from mercurial.utils import (
74 from mercurial.utils import (
75 procutil,
75 procutil,
76 stringutil,
76 stringutil,
77 )
77 )
78
78
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

# Re-export the exthelper registration points so hg's extension loader
# picks them up from this module.
cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color/effect labels used when writing phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra command-line flag appended to every vcr-enabled command; see
# vcrcommand() below.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
134
134
135
135
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed.

    Behaves like ``command(...)`` but appends the ``--test-vcr`` flag: when
    that flag is given, all HTTP requests are recorded to (or replayed from)
    the named vcr cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Conduit requests are POST bodies of &-joined key=value pairs whose
        # order is not significant, so compare them as sets.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Strip the real API token before it lands in a recorded transcript.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session-specific noise; drop them from transcripts.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does dynamic imports that fight hg's demand importer.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve identity/help text for the command registration below.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
200
200
201
201
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        # Dispatch on the exact type (not isinstance): dict/list subclasses
        # are deliberately treated as leaf values here.
        handlers = {
            list: lambda seq: [(b'%d' % i, v) for i, v in enumerate(seq)],
            dict: lambda mapping: mapping.items(),
        }
        handler = handlers.get(type(obj))
        if handler is None:
            flatparams[prefix] = obj
        else:
            for key, value in handler(obj):
                childprefix = b'%s[%s]' % (prefix, key) if prefix else key
                process(childprefix, value)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
227
227
228
228
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Look the URL up in the [auth] section to find a matching group and
    # pull its phabtoken entry, if any.
    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
257
257
258
258
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))

    # Every call authenticates via the api.token parameter; work on a copy
    # so the caller's dict is not mutated.
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)

    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the form data through its stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Built-in HTTP library path.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()

    ui.debug(b'Conduit Response: %s\n' % body)
    # Recursively convert unicode strings in the decoded JSON back to
    # local (bytes) encoding.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
295
295
296
296
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
320
320
321
321
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid

    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None

    # Resolve the callsign to a PHID via the Conduit repository search.
    searchresult = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not searchresult[b'data']:
        return None

    repophid = searchresult[b'data'][0][b'phid']
    # Cache the result in-memory so later calls skip the round-trip.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
341
341
342
342
343 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
343 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
344 _differentialrevisiondescre = re.compile(
344 _differentialrevisiondescre = re.compile(
345 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
345 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
346 )
346 )
347
347
348
348
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this ``continue`` only advances the
                        # inner tag loop; the commit-message check below may
                        # still overwrite the entry — confirm intended.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
439
439
440
440
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw chunks; the ui labels are not needed here.
    return b''.join(
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
        )
    )
449
449
450
450
class DiffChangeType(object):
    """Numeric change-type constants used in Phabricator diff metadata."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
460
461
class DiffFileType(object):
    """Numeric file-type constants used in Phabricator diff metadata."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
466
467
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.createrawdiff" API.
    # context=32767 effectively produces a full-context diff.
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
463
480
464
481
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # "hg:meta": per-changeset metadata needed to reconstruct the commit.
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    # "local:commits": the commit description keyed by node, in the format
    # Phabricator's UI consumes.
    params = {
        b'diff_id': diff[b'id'],
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
499
516
500
517
501 def createdifferentialrevision(
518 def createdifferentialrevision(
502 ctx,
519 ctx,
503 revid=None,
520 revid=None,
504 parentrevphid=None,
521 parentrevphid=None,
505 oldnode=None,
522 oldnode=None,
506 olddiff=None,
523 olddiff=None,
507 actions=None,
524 actions=None,
508 comment=None,
525 comment=None,
509 ):
526 ):
510 """create or update a Differential Revision
527 """create or update a Differential Revision
511
528
512 If revid is None, create a new Differential Revision, otherwise update
529 If revid is None, create a new Differential Revision, otherwise update
513 revid. If parentrevphid is not None, set it as a dependency.
530 revid. If parentrevphid is not None, set it as a dependency.
514
531
515 If oldnode is not None, check if the patch content (without commit message
532 If oldnode is not None, check if the patch content (without commit message
516 and metadata) has changed before creating another diff.
533 and metadata) has changed before creating another diff.
517
534
518 If actions is not None, they will be appended to the transaction.
535 If actions is not None, they will be appended to the transaction.
519 """
536 """
520 repo = ctx.repo()
537 repo = ctx.repo()
521 if oldnode:
538 if oldnode:
522 diffopts = mdiff.diffopts(git=True, context=32767)
539 diffopts = mdiff.diffopts(git=True, context=32767)
523 oldctx = repo.unfiltered()[oldnode]
540 oldctx = repo.unfiltered()[oldnode]
524 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
541 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
525 else:
542 else:
526 neednewdiff = True
543 neednewdiff = True
527
544
528 transactions = []
545 transactions = []
529 if neednewdiff:
546 if neednewdiff:
530 diff = creatediff(ctx)
547 diff = creatediff(ctx)
531 transactions.append({b'type': b'update', b'value': diff[b'phid']})
548 transactions.append({b'type': b'update', b'value': diff[b'phid']})
532 if comment:
549 if comment:
533 transactions.append({b'type': b'comment', b'value': comment})
550 transactions.append({b'type': b'comment', b'value': comment})
534 else:
551 else:
535 # Even if we don't need to upload a new diff because the patch content
552 # Even if we don't need to upload a new diff because the patch content
536 # does not change. We might still need to update its metadata so
553 # does not change. We might still need to update its metadata so
537 # pushers could know the correct node metadata.
554 # pushers could know the correct node metadata.
538 assert olddiff
555 assert olddiff
539 diff = olddiff
556 diff = olddiff
540 writediffproperties(ctx, diff)
557 writediffproperties(ctx, diff)
541
558
542 # Set the parent Revision every time, so commit re-ordering is picked-up
559 # Set the parent Revision every time, so commit re-ordering is picked-up
543 if parentrevphid:
560 if parentrevphid:
544 transactions.append(
561 transactions.append(
545 {b'type': b'parents.set', b'value': [parentrevphid]}
562 {b'type': b'parents.set', b'value': [parentrevphid]}
546 )
563 )
547
564
548 if actions:
565 if actions:
549 transactions += actions
566 transactions += actions
550
567
551 # Parse commit message and update related fields.
568 # Parse commit message and update related fields.
552 desc = ctx.description()
569 desc = ctx.description()
553 info = callconduit(
570 info = callconduit(
554 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
571 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
555 )
572 )
556 for k, v in info[b'fields'].items():
573 for k, v in info[b'fields'].items():
557 if k in [b'title', b'summary', b'testPlan']:
574 if k in [b'title', b'summary', b'testPlan']:
558 transactions.append({b'type': k, b'value': v})
575 transactions.append({b'type': k, b'value': v})
559
576
560 params = {b'transactions': transactions}
577 params = {b'transactions': transactions}
561 if revid is not None:
578 if revid is not None:
562 # Update an existing Differential Revision
579 # Update an existing Differential Revision
563 params[b'objectIdentifier'] = revid
580 params[b'objectIdentifier'] = revid
564
581
565 revision = callconduit(repo.ui, b'differential.revision.edit', params)
582 revision = callconduit(repo.ui, b'differential.revision.edit', params)
566 if not revision:
583 if not revision:
567 raise error.Abort(_(b'cannot create revision for %s') % ctx)
584 raise error.Abort(_(b'cannot create revision for %s') % ctx)
568
585
569 return revision, diff
586 return revision, diff
570
587
571
588
def userphids(repo, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    # The API does not report unknown usernames as an error; it silently
    # omits them from the response, so detect any misses ourselves.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
587
604
588
605
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm on the command line or phabsend.confirm in the config both
    # trigger the interactive confirmation prompt.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        # Blocking reviewers are expressed as "blocking(PHID)" markers in the
        # same reviewers.add transaction value.
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewrite parents through `mapping` so already-amended
                    # ancestors are used instead of their obsolete originals.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
797
814
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: util.sortdict preserves insertion order, so do not reorder entries.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
809
826
810
827
def _confirmbeforesend(repo, revs, oldmap):
    """interactively confirm that the given revisions should be sent

    Prints one summary line per changeset — its known Differential Revision
    (or NEW), the node, and the first line of the description — then prompts.
    Returns True to proceed, False to cancel.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        summary = ctx.description().splitlines()[0]
        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                nodedesc,
                ui.label(summary, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for the first choice (Yes).
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
838
855
839
856
# Status names (normalized as in _getstatusname: lower-case, spaces removed)
# that may appear as bare symbols in a Differential Revision spec.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
847
864
848
865
849 def _getstatusname(drev):
866 def _getstatusname(drev):
850 """get normalized status name from a Differential Revision"""
867 """get normalized status name from a Differential Revision"""
851 return drev[b'statusName'].replace(b' ', b'').lower()
868 return drev[b'statusName'].replace(b' ', b'').lower()
852
869
853
870
854 # Small language to specify differential revisions. Support symbols: (), :X,
871 # Small language to specify differential revisions. Support symbols: (), :X,
855 # +, and -.
872 # +, and -.
856
873
# Grammar table for the mini drev-spec language, consumed by parser.parser()
# in _parse(). Token values come from _tokenize().
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
868
885
869
886
def _tokenize(text):
    """tokenize *text* (bytes) into (token-type, value, position) triples

    Symbol tokens carry the matched bytes as value; special single-character
    tokens carry None. Spaces are skipped. A final (b'end', None, pos) token
    is always emitted so the parser sees explicit end-of-input.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Greedily collect the longest run of non-special bytes as one symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos] != b' ':
                yield (text[pos], None, pos)
            pos += 1
    yield (b'end', None, pos)
889
906
890
907
def _parse(text):
    """Parse a drev-spec into an AST tuple; abort on trailing garbage."""
    specparser = parser.parser(_elements)
    tree, consumed = specparser.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
896
913
897
914
898 def _parsedrev(symbol):
915 def _parsedrev(symbol):
899 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
916 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
900 if symbol.startswith(b'D') and symbol[1:].isdigit():
917 if symbol.startswith(b'D') and symbol[1:].isdigit():
901 return int(symbol[1:])
918 return int(symbol[1:])
902 if symbol.isdigit():
919 if symbol.isdigit():
903 return int(symbol)
920 return int(symbol)
904
921
905
922
906 def _prefetchdrevs(tree):
923 def _prefetchdrevs(tree):
907 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
924 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
908 drevs = set()
925 drevs = set()
909 ancestordrevs = set()
926 ancestordrevs = set()
910 op = tree[0]
927 op = tree[0]
911 if op == b'symbol':
928 if op == b'symbol':
912 r = _parsedrev(tree[1])
929 r = _parsedrev(tree[1])
913 if r:
930 if r:
914 drevs.add(r)
931 drevs.add(r)
915 elif op == b'ancestors':
932 elif op == b'ancestors':
916 r, a = _prefetchdrevs(tree[1])
933 r, a = _prefetchdrevs(tree[1])
917 drevs.update(r)
934 drevs.update(r)
918 ancestordrevs.update(r)
935 ancestordrevs.update(r)
919 ancestordrevs.update(a)
936 ancestordrevs.update(a)
920 else:
937 else:
921 for t in tree[1:]:
938 for t in tree[1:]:
922 r, a = _prefetchdrevs(t)
939 r, a = _prefetchdrevs(t)
923 drevs.update(r)
940 drevs.update(r)
924 ancestordrevs.update(a)
941 ancestordrevs.update(a)
925 return drevs, ancestordrevs
942 return drevs, ancestordrevs
926
943
927
944
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # The cache key is the first requested id or phid; a single conduit
        # call may still fill the cache with many drevs.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges downwards to the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch. For ancestor queries, guess
    # that the stack fits in the last `batchsize` ids before each top.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Symbols like b'needsreview' select prefetched revisions by
                # normalized status name.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Dispatch to operator.and_/add/sub on the operand smartsets.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1050
1067
1051
1068
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL. Empty sections
    are omitted; the remaining ones are joined by blank lines.
    """
    parts = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        drev[b'testPlan'].rstrip(),
        b'Differential Revision: %s' % drev[b'uri'],
    ]
    if parts[2]:
        parts[2] = b'Test Plan:\n%s' % parts[2]
    return b'\n\n'.join(part for part in parts if part)
1065
1082
1066
1083
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick the earliest commit. The previous code sorted the dict
            # values themselves (sorted(...)[0]), which raises TypeError
            # on Python 3 whenever more than one commit is present, since
            # dicts are unorderable; key on (time, node) instead for a
            # deterministic choice.
            commit = min(
                props[b'local:commits'].values(),
                key=lambda c: (
                    int(c.get(b'time', 0)),
                    c.get(b'commit', c.get(b'rev', b'')),
                ),
            )
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                # Rebuild "Name <email>" from arc's split fields.
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # Time zone is not recorded by arc; assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1133
1150
1134
1151
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    ``write`` is usually ``ui.write``. ``drevs`` is what "querydrev"
    returns, i.e. results of "differential.query".
    """
    ui = repo.ui
    # Batch-fetch metadata for the latest diff of every revision up front,
    # so the hg:meta properties need only one conduit round-trip.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            ui, b'differential.getrawdiff', {b'diffID': diffid}
        )

        # Emit metadata from the hg:meta property as patch headers the
        # "import" command understands. See patchheadermap and extract in
        # mercurial/patch.py for the supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch\n']
        for k, headername in _metanamemap.items():
            if k in meta:
                headerlines.append(b'# %s %s\n' % (headername, meta[k]))

        desc = getdescfromdrev(drev)
        write(b'%s%s\n%s' % (b''.join(headerlines), desc, body))
1166
1183
1167
1184
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ":" (ancestors) operator.
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1198
1215
1199
1216
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [
        n
        for n in (b'accept', b'reject', b'abandon', b'reclaim')
        if opts.get(n)
    ]
    # The status transitions are mutually exclusive on the server side.
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': b'true'} for f in flags]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # A comment, if given, is attached to the last revision only.
        if i == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1236
1253
1237
1254
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the "Differential Revision:" line in the commit message.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    # Otherwise, fall back to a Differential revision tag on the node.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url = url + b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now