##// END OF EJS Templates
phabricator: use True primitive instead of b'true' for phabupdate actions...
Ian Moody -
r43659:e57bf37e stable draft
parent child Browse files
Show More
@@ -1,1651 +1,1651 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Shared extension helper; the aliases below are the module-level names that
# Mercurial's extension loader looks up.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Labels used to colorize phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every command registered through vcrcommand(); it
# names the cassette file used to record/replay HTTP traffic in tests.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139
139
140
140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded or replayed.

    Behaves like ``command(...)`` but appends the ``--test-vcr`` flag; when
    that flag is supplied at run time, conduit requests are recorded into
    (or served from) the named vcr cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Requests match when URL, method and decoded body parameters agree.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        leftparams = util.urlreq.parseqs(r1.body)
        rightparams = util.urlreq.parseqs(r2.body)
        for key in leftparams:
            if key not in rightparams:
                return False
            value = leftparams[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                if json.loads(value) != json.loads(rightparams[key][0]):
                    return False
            elif rightparams[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub API tokens so they never end up inside a recorded cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Session cookies are secrets too; drop them from recordings.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                # vcr performs dynamic imports that the demand importer
                # mishandles, so disable it while setting things up.
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
217
217
218
218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, value):
        # Booleans are spelled the way PHP expects them in form data.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type checks (not isinstance) on purpose: dict/list subclasses
        # are treated as scalars, matching the original dispatch-by-type.
        kind = type(value)
        if kind is list:
            children = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif kind is dict:
            children = list(value.items())
        else:
            flatparams[prefix] = value
            return
        for subkey, subvalue in children:
            if prefix:
                flatten(b'%s[%s]' % (prefix, subkey), subvalue)
            else:
                flatten(subkey, subvalue)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
244
244
245
245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Reuse Mercurial's [auth] machinery to locate the matching credentials.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None
    if res is not None:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274
274
275
275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Attach the auth token without mutating the caller's dict.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the form data through the user-configured curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: Mercurial's own URL opener honouring [auth] settings.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
319
319
320
320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    params = pycompat.rapply(tolocal, json.loads(rawparams))
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
344
344
345
345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    data = query[b'data']
    if not data:
        return None
    repophid = data[0][b'phid']
    # Cache the PHID in the in-memory config so later calls skip the lookup.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
365
365
366
366
# Local tag names of the form "D123" that associate a node with a
# Differential Revision number.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# The "Differential Revision: <url>D123" trailer line in a commit message;
# group 'url' captures the full link, group 'id' the revision number.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
371
371
372
372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this 'continue' only advances the
                        # innermost tag loop, so later tags/precursors can
                        # overwrite the entry; presumably a break out to the
                        # next node was intended -- confirm before changing.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            # force=1: the commit message is authoritative, skip the
            # precursor-overlap sanity check below.
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Map a Differential diff to the local node recorded in its metadata,
        # or None when no valid node is recorded.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                # Remove the stale local "D123" tag by re-tagging nullid.
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
463
463
464
464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw chunks; the labels only matter for terminal output.
    return b''.join(
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
        )
    )
473
473
474
474
class DiffChangeType(object):
    """Numeric change kinds for a file in a Differential diff.

    Used as the ``type`` field of a phabchange; the numeric values are what
    the conduit API expects on the wire.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
484
484
485
485
class DiffFileType(object):
    """Numeric file kinds for a Differential change (``fileType`` field)."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
490
490
491
491
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Position and extent of the hunk on the old and new side of the diff.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body itself (context/+/- lines).
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
505
505
506
506
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Snapshot the keys first: we insert 'old:' entries while iterating.
        for key in list(self.metadata.keys()):
            self.metadata[key.replace(b'new:', b'old:')] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
546
546
547
547
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Instances are serialized with attr.asdict and sent verbatim as the
    parameters of the ``differential.creatediff`` Conduit call, so every
    attribute marked ``camelcase-required`` must keep its exact camelCase
    spelling to match the API field name.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # maps a change's currentPath to its serialized phabchange dict
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register *change* (a phabchange), keyed by its currentPath."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
574
574
575
575
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file

    Diffs ``fname`` between ``ctx.p1()`` and ``ctx`` and appends each hunk
    to ``pchange`` via addhunk (which also accumulates add/del totals).
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # Very large context so each hunk carries (practically) the whole file.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] appears to be the @@ hunk header; the corpus sent to
        # Phabricator is the hunk body only — NOTE(review): inferred from
        # the slice, confirm against patch.diffhunks output.
        corpus = b''.join(lines[1:])
        # Rebuild a standalone diff (file header + hunk) so diffstat can
        # count this hunk's added/deleted lines in isolation.
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
604
604
605
605
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fctx`` is the filectx whose content is uploaded; ``fphid`` is the
    PHID returned by the earlier ``file.allocate`` call.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    # Materialize the file content once instead of once per chunk; the
    # per-chunk slices below are cheap compared to re-reading the blob.
    data = fctx.data()
    for chunk in chunks:
        progress.increment()
        # The server marks chunks it already holds (e.g. from an earlier
        # interrupted upload) as complete; skip those.
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(data[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
632
632
633
633
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the server-side file object, aborting when the
    upload did not yield one.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # file.allocate reports whether the server already has this content
    # (no upload needed at all) and whether the upload must be chunked.
    allocation = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = allocation[b'filePHID']

    if not allocation[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID with an upload still pending means chunked transfer.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
669
669
670
670
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Content unchanged: if it stays marked IMAGE/BINARY the web UI
        # might try to display it, so report TEXT and mirror the metadata.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ: upload the old version and record its details.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
692
692
693
693
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        mime = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = mime
        # Images get their own file type so the web UI shows a preview.
        if mime.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
706
706
707
707
# Copied from mercurial/patch.py
# Maps a filectx flag to the git mode string used in diff metadata:
# 'l' (symlink), 'x' (executable), '' (regular file).
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
710
710
711
711
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Checks both the current revision and (when one exists) the first
    parent, since the diff will reference both sides.
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
727
727
728
728
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    For each deleted path a DELETE phabchange is built carrying the old
    file mode and, for text files, the deletion hunks.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the parent filectx up once and reuse it for both the mode
        # and the binary check (the original resolved ctx.p1()[fname]
        # twice).
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
741
741
742
742
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[ctx[fname].flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only emit mode properties when the mode actually changed.
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
761
761
762
762
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates *removed* in place — a rename source found in it is
    removed so that a later addremoved() call does not also report it as
    a deletion.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source was deleted in the same changeset: a move. The
                # source gets its own MOVE_AWAY change pointing at us.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Second+ destination of an already-recorded move: the
                # source becomes a MULTICOPY with several away paths.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Source-side changes are added after all destinations so MULTICOPY
    # upgrades and awayPaths accumulation above are complete.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
826
826
827
827
def creatediff(ctx):
    """create a Differential Diff

    Builds a phabdiff for *ctx* against its first parent and posts it via
    the "differential.creatediff" Conduit API, returning the response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    if repophid:
        pdiff.repositoryPHID = repophid

    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
853
853
854
854
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns the id as 'diffid'; query responses use 'id'
    diffid = diff.get(b'diffid', diff.get(b'id'))

    def setprop(name, data):
        # one differential.setdiffproperty call per named property
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {b'diff_id': diffid, b'name': name, b'data': data},
        )

    setprop(
        b'hg:meta',
        templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    )

    setprop(
        b'local:commits',
        templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    )
891
891
892
892
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair: the revision.edit response and
    the diff (new or ``olddiff``) the revision now points at.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context patches so only real content changes (not
        # commit message/metadata edits) force a new diff upload.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    # Caller-supplied transactions (e.g. reviewers.add) go in as-is.
    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
962
962
963
963
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts on any name the server cannot resolve, because user.search
    silently omits unknown usernames rather than raising an API error.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    # set comprehension instead of set(generator) — same result, clearer
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
979
979
980
980
981 @vcrcommand(
981 @vcrcommand(
982 b'phabsend',
982 b'phabsend',
983 [
983 [
984 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
984 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
985 (b'', b'amend', True, _(b'update commit messages')),
985 (b'', b'amend', True, _(b'update commit messages')),
986 (b'', b'reviewer', [], _(b'specify reviewers')),
986 (b'', b'reviewer', [], _(b'specify reviewers')),
987 (b'', b'blocker', [], _(b'specify blocking reviewers')),
987 (b'', b'blocker', [], _(b'specify blocking reviewers')),
988 (
988 (
989 b'm',
989 b'm',
990 b'comment',
990 b'comment',
991 b'',
991 b'',
992 _(b'add a comment to Revisions with new/updated Diffs'),
992 _(b'add a comment to Revisions with new/updated Diffs'),
993 ),
993 ),
994 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
994 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
995 ],
995 ],
996 _(b'REV [OPTIONS]'),
996 _(b'REV [OPTIONS]'),
997 helpcategory=command.CATEGORY_IMPORT_EXPORT,
997 helpcategory=command.CATEGORY_IMPORT_EXPORT,
998 )
998 )
999 def phabsend(ui, repo, *revs, **opts):
999 def phabsend(ui, repo, *revs, **opts):
1000 """upload changesets to Phabricator
1000 """upload changesets to Phabricator
1001
1001
1002 If there are multiple revisions specified, they will be send as a stack
1002 If there are multiple revisions specified, they will be send as a stack
1003 with a linear dependencies relationship using the order specified by the
1003 with a linear dependencies relationship using the order specified by the
1004 revset.
1004 revset.
1005
1005
1006 For the first time uploading changesets, local tags will be created to
1006 For the first time uploading changesets, local tags will be created to
1007 maintain the association. After the first time, phabsend will check
1007 maintain the association. After the first time, phabsend will check
1008 obsstore and tags information so it can figure out whether to update an
1008 obsstore and tags information so it can figure out whether to update an
1009 existing Differential Revision, or create a new one.
1009 existing Differential Revision, or create a new one.
1010
1010
1011 If --amend is set, update commit messages so they have the
1011 If --amend is set, update commit messages so they have the
1012 ``Differential Revision`` URL, remove related tags. This is similar to what
1012 ``Differential Revision`` URL, remove related tags. This is similar to what
1013 arcanist will do, and is more desired in author-push workflows. Otherwise,
1013 arcanist will do, and is more desired in author-push workflows. Otherwise,
1014 use local tags to record the ``Differential Revision`` association.
1014 use local tags to record the ``Differential Revision`` association.
1015
1015
1016 The --confirm option lets you confirm changesets before sending them. You
1016 The --confirm option lets you confirm changesets before sending them. You
1017 can also add following to your configuration file to make it default
1017 can also add following to your configuration file to make it default
1018 behaviour::
1018 behaviour::
1019
1019
1020 [phabsend]
1020 [phabsend]
1021 confirm = true
1021 confirm = true
1022
1022
1023 phabsend will check obsstore and the above association to decide whether to
1023 phabsend will check obsstore and the above association to decide whether to
1024 update an existing Differential Revision, or create a new one.
1024 update an existing Differential Revision, or create a new one.
1025 """
1025 """
1026 opts = pycompat.byteskwargs(opts)
1026 opts = pycompat.byteskwargs(opts)
1027 revs = list(revs) + opts.get(b'rev', [])
1027 revs = list(revs) + opts.get(b'rev', [])
1028 revs = scmutil.revrange(repo, revs)
1028 revs = scmutil.revrange(repo, revs)
1029
1029
1030 if not revs:
1030 if not revs:
1031 raise error.Abort(_(b'phabsend requires at least one changeset'))
1031 raise error.Abort(_(b'phabsend requires at least one changeset'))
1032 if opts.get(b'amend'):
1032 if opts.get(b'amend'):
1033 cmdutil.checkunfinished(repo)
1033 cmdutil.checkunfinished(repo)
1034
1034
1035 # {newnode: (oldnode, olddiff, olddrev}
1035 # {newnode: (oldnode, olddiff, olddrev}
1036 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1036 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1037
1037
1038 confirm = ui.configbool(b'phabsend', b'confirm')
1038 confirm = ui.configbool(b'phabsend', b'confirm')
1039 confirm |= bool(opts.get(b'confirm'))
1039 confirm |= bool(opts.get(b'confirm'))
1040 if confirm:
1040 if confirm:
1041 confirmed = _confirmbeforesend(repo, revs, oldmap)
1041 confirmed = _confirmbeforesend(repo, revs, oldmap)
1042 if not confirmed:
1042 if not confirmed:
1043 raise error.Abort(_(b'phabsend cancelled'))
1043 raise error.Abort(_(b'phabsend cancelled'))
1044
1044
1045 actions = []
1045 actions = []
1046 reviewers = opts.get(b'reviewer', [])
1046 reviewers = opts.get(b'reviewer', [])
1047 blockers = opts.get(b'blocker', [])
1047 blockers = opts.get(b'blocker', [])
1048 phids = []
1048 phids = []
1049 if reviewers:
1049 if reviewers:
1050 phids.extend(userphids(repo, reviewers))
1050 phids.extend(userphids(repo, reviewers))
1051 if blockers:
1051 if blockers:
1052 phids.extend(
1052 phids.extend(
1053 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1053 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1054 )
1054 )
1055 if phids:
1055 if phids:
1056 actions.append({b'type': b'reviewers.add', b'value': phids})
1056 actions.append({b'type': b'reviewers.add', b'value': phids})
1057
1057
1058 drevids = [] # [int]
1058 drevids = [] # [int]
1059 diffmap = {} # {newnode: diff}
1059 diffmap = {} # {newnode: diff}
1060
1060
1061 # Send patches one by one so we know their Differential Revision PHIDs and
1061 # Send patches one by one so we know their Differential Revision PHIDs and
1062 # can provide dependency relationship
1062 # can provide dependency relationship
1063 lastrevphid = None
1063 lastrevphid = None
1064 for rev in revs:
1064 for rev in revs:
1065 ui.debug(b'sending rev %d\n' % rev)
1065 ui.debug(b'sending rev %d\n' % rev)
1066 ctx = repo[rev]
1066 ctx = repo[rev]
1067
1067
1068 # Get Differential Revision ID
1068 # Get Differential Revision ID
1069 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1069 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1070 if oldnode != ctx.node() or opts.get(b'amend'):
1070 if oldnode != ctx.node() or opts.get(b'amend'):
1071 # Create or update Differential Revision
1071 # Create or update Differential Revision
1072 revision, diff = createdifferentialrevision(
1072 revision, diff = createdifferentialrevision(
1073 ctx,
1073 ctx,
1074 revid,
1074 revid,
1075 lastrevphid,
1075 lastrevphid,
1076 oldnode,
1076 oldnode,
1077 olddiff,
1077 olddiff,
1078 actions,
1078 actions,
1079 opts.get(b'comment'),
1079 opts.get(b'comment'),
1080 )
1080 )
1081 diffmap[ctx.node()] = diff
1081 diffmap[ctx.node()] = diff
1082 newrevid = int(revision[b'object'][b'id'])
1082 newrevid = int(revision[b'object'][b'id'])
1083 newrevphid = revision[b'object'][b'phid']
1083 newrevphid = revision[b'object'][b'phid']
1084 if revid:
1084 if revid:
1085 action = b'updated'
1085 action = b'updated'
1086 else:
1086 else:
1087 action = b'created'
1087 action = b'created'
1088
1088
1089 # Create a local tag to note the association, if commit message
1089 # Create a local tag to note the association, if commit message
1090 # does not have it already
1090 # does not have it already
1091 m = _differentialrevisiondescre.search(ctx.description())
1091 m = _differentialrevisiondescre.search(ctx.description())
1092 if not m or int(m.group(r'id')) != newrevid:
1092 if not m or int(m.group(r'id')) != newrevid:
1093 tagname = b'D%d' % newrevid
1093 tagname = b'D%d' % newrevid
1094 tags.tag(
1094 tags.tag(
1095 repo,
1095 repo,
1096 tagname,
1096 tagname,
1097 ctx.node(),
1097 ctx.node(),
1098 message=None,
1098 message=None,
1099 user=None,
1099 user=None,
1100 date=None,
1100 date=None,
1101 local=True,
1101 local=True,
1102 )
1102 )
1103 else:
1103 else:
1104 # Nothing changed. But still set "newrevphid" so the next revision
1104 # Nothing changed. But still set "newrevphid" so the next revision
1105 # could depend on this one and "newrevid" for the summary line.
1105 # could depend on this one and "newrevid" for the summary line.
1106 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1106 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1107 newrevid = revid
1107 newrevid = revid
1108 action = b'skipped'
1108 action = b'skipped'
1109
1109
1110 actiondesc = ui.label(
1110 actiondesc = ui.label(
1111 {
1111 {
1112 b'created': _(b'created'),
1112 b'created': _(b'created'),
1113 b'skipped': _(b'skipped'),
1113 b'skipped': _(b'skipped'),
1114 b'updated': _(b'updated'),
1114 b'updated': _(b'updated'),
1115 }[action],
1115 }[action],
1116 b'phabricator.action.%s' % action,
1116 b'phabricator.action.%s' % action,
1117 )
1117 )
1118 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1118 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1119 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1119 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1120 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1120 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1121 ui.write(
1121 ui.write(
1122 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1122 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1123 )
1123 )
1124 drevids.append(newrevid)
1124 drevids.append(newrevid)
1125 lastrevphid = newrevphid
1125 lastrevphid = newrevphid
1126
1126
1127 # Update commit messages and remove tags
1127 # Update commit messages and remove tags
1128 if opts.get(b'amend'):
1128 if opts.get(b'amend'):
1129 unfi = repo.unfiltered()
1129 unfi = repo.unfiltered()
1130 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1130 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1131 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1131 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1132 wnode = unfi[b'.'].node()
1132 wnode = unfi[b'.'].node()
1133 mapping = {} # {oldnode: [newnode]}
1133 mapping = {} # {oldnode: [newnode]}
1134 for i, rev in enumerate(revs):
1134 for i, rev in enumerate(revs):
1135 old = unfi[rev]
1135 old = unfi[rev]
1136 drevid = drevids[i]
1136 drevid = drevids[i]
1137 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1137 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1138 newdesc = getdescfromdrev(drev)
1138 newdesc = getdescfromdrev(drev)
1139 # Make sure commit message contain "Differential Revision"
1139 # Make sure commit message contain "Differential Revision"
1140 if old.description() != newdesc:
1140 if old.description() != newdesc:
1141 if old.phase() == phases.public:
1141 if old.phase() == phases.public:
1142 ui.warn(
1142 ui.warn(
1143 _(b"warning: not updating public commit %s\n")
1143 _(b"warning: not updating public commit %s\n")
1144 % scmutil.formatchangeid(old)
1144 % scmutil.formatchangeid(old)
1145 )
1145 )
1146 continue
1146 continue
1147 parents = [
1147 parents = [
1148 mapping.get(old.p1().node(), (old.p1(),))[0],
1148 mapping.get(old.p1().node(), (old.p1(),))[0],
1149 mapping.get(old.p2().node(), (old.p2(),))[0],
1149 mapping.get(old.p2().node(), (old.p2(),))[0],
1150 ]
1150 ]
1151 new = context.metadataonlyctx(
1151 new = context.metadataonlyctx(
1152 repo,
1152 repo,
1153 old,
1153 old,
1154 parents=parents,
1154 parents=parents,
1155 text=newdesc,
1155 text=newdesc,
1156 user=old.user(),
1156 user=old.user(),
1157 date=old.date(),
1157 date=old.date(),
1158 extra=old.extra(),
1158 extra=old.extra(),
1159 )
1159 )
1160
1160
1161 newnode = new.commit()
1161 newnode = new.commit()
1162
1162
1163 mapping[old.node()] = [newnode]
1163 mapping[old.node()] = [newnode]
1164 # Update diff property
1164 # Update diff property
1165 # If it fails just warn and keep going, otherwise the DREV
1165 # If it fails just warn and keep going, otherwise the DREV
1166 # associations will be lost
1166 # associations will be lost
1167 try:
1167 try:
1168 writediffproperties(unfi[newnode], diffmap[old.node()])
1168 writediffproperties(unfi[newnode], diffmap[old.node()])
1169 except util.urlerr.urlerror:
1169 except util.urlerr.urlerror:
1170 ui.warnnoi18n(
1170 ui.warnnoi18n(
1171 b'Failed to update metadata for D%d\n' % drevid
1171 b'Failed to update metadata for D%d\n' % drevid
1172 )
1172 )
1173 # Remove local tags since it's no longer necessary
1173 # Remove local tags since it's no longer necessary
1174 tagname = b'D%d' % drevid
1174 tagname = b'D%d' % drevid
1175 if tagname in repo.tags():
1175 if tagname in repo.tags():
1176 tags.tag(
1176 tags.tag(
1177 repo,
1177 repo,
1178 tagname,
1178 tagname,
1179 nullid,
1179 nullid,
1180 message=None,
1180 message=None,
1181 user=None,
1181 user=None,
1182 date=None,
1182 date=None,
1183 local=True,
1183 local=True,
1184 )
1184 )
1185 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1185 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1186 if wnode in mapping:
1186 if wnode in mapping:
1187 unfi.setparents(mapping[wnode][0])
1187 unfi.setparents(mapping[wnode][0])
1188
1188
1189
1189
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): trailing space in b'Parent ' presumably matches the
        # "# Parent  <node>" spacing of "hg export" headers -- confirm there.
        (b'parent', b'Parent '),
    ]
)
1201
1201
1202
1202
def _confirmbeforesend(repo, revs, oldmap):
    """Print a one-line summary per changeset and prompt before sending.

    Each line shows the Differential Revision that would be updated (or
    ``NEW`` when one would be created), the node, and the first line of the
    commit message.  Returns False when the user declines, True otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        # oldmap values are (oldnode, olddiff, drevid); only the drev id
        # matters for display here
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice returns 0 for "Yes", so a truthy result means "No"
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1230
1230
1231
1231
# Normalized Differential Revision status names accepted as filter symbols by
# the DREVSPEC query language (they are compared against _getstatusname()'s
# output in querydrev's walk()).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1239
1239
1240
1240
1241 def _getstatusname(drev):
1241 def _getstatusname(drev):
1242 """get normalized status name from a Differential Revision"""
1242 """get normalized status name from a Differential Revision"""
1243 return drev[b'statusName'].replace(b' ', b'').lower()
1243 return drev[b'statusName'].replace(b' ', b'').lower()
1244
1244
1245
1245
1246 # Small language to specify differential revisions. Support symbols: (), :X,
1246 # Small language to specify differential revisions. Support symbols: (), :X,
1247 # +, and -.
1247 # +, and -.
1248
1248
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    # (the table shape expected by mercurial's generic parser.parser;
    # consumed in _parse below)
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1260
1260
1261
1261
def _tokenize(text):
    """Split a DREVSPEC byte string into (token-type, value, position) tuples.

    Yields ``(b'symbol', <bytes>, pos)`` for maximal runs of non-special
    characters, ``(<char>, None, pos)`` for each operator in ``():+-&``,
    silently skips spaces, and terminates with ``(b'end', None, pos)``.
    """
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # scan forward to the end of the current symbol, if any
        end = pos
        while end < length and text[end : end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:
            # a single special character; spaces produce no token at all
            ch = text[pos : pos + 1]
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1281
1281
1282
1282
def _parse(text):
    """Parse a DREVSPEC byte string into a tree, aborting on trailing input."""
    tokens = _tokenize(text)
    tree, pos = parser.parser(_elements).parse(tokens)
    # a successful parse must consume the whole spec
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
1288
1288
1289
1289
1290 def _parsedrev(symbol):
1290 def _parsedrev(symbol):
1291 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1291 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1292 if symbol.startswith(b'D') and symbol[1:].isdigit():
1292 if symbol.startswith(b'D') and symbol[1:].isdigit():
1293 return int(symbol[1:])
1293 return int(symbol[1:])
1294 if symbol.isdigit():
1294 if symbol.isdigit():
1295 return int(symbol)
1295 return int(symbol)
1296
1296
1297
1297
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    kind = tree[0]
    if kind == b'symbol':
        # a plain D123/123 symbol; status names parse to None and are skipped
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif kind == b'ancestors':
        # everything reachable from the operand is needed, and each operand
        # id is itself a stack top to expand
        sub, subancestors = _prefetchdrevs(tree[1])
        drevs |= sub
        ancestordrevs |= sub | subancestors
    else:
        # binary operators / groups: union the results of all operands
        for subtree in tree[1:]:
            sub, subancestors = _prefetchdrevs(subtree)
            drevs |= sub
            ancestordrevs |= subancestors
    return drevs, ancestordrevs
1318
1318
1319
1319
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # params carries either numeric ids or phids; the first one is the
        # cache key for the result we must return
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result; each drev is cached under both
        # its phid and its integer id so later lookups by either key hit
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # depth-first walk over "phabricator:depends-on" edges, collecting
        # ids top-down, then reversed so the bottom of the stack comes first
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each stack top, guess
    # that its ancestors live in the preceding `batchsize` ids and fetch
    # them all in one conduit round trip
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status filter: restrict prefetched ids to matching status
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # NOTE(review): getattr with a bytes attribute name fails on
            # Python 3 -- this likely needs pycompat.sysstr(op); confirm.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1442
1442
1443
1443
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n' + testplan
    # assemble the sections in commit-message order, dropping empty ones
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
        testplan,
        b'Differential Revision: ' + drev[b'uri'],
    ]
    return b'\n\n'.join(section for section in sections if section)
1457
1457
1458
1458
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "time": 1499546314,
          "branch": "default",
          "tag": "",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "summary": "...",
          "message": "...",
          "authorEmail": "foo@example.com"
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # A diff normally carries a single commit; when it carries
            # several, pick one deterministically by commit hash.  (The
            # previous code sorted the commit dicts themselves, which
            # raises TypeError on Python 3 because dicts do not support
            # ordering.)
            commits = props[b'local:commits']
            commit = commits[min(commits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc only records a unix timestamp; fake a UTC offset
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to fields of the diff object itself for anything the
    # properties did not provide
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1525
1525
1526
1526
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs; only the latest (highest-id)
    # diff of each drev is of interest
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        # header + commit message + raw diff body form an importable patch
        content = b'%s%s\n%s' % (header, desc, body)
        write(content)
1558
1558
1559
1559
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC may be a single Differential Revision identifier such as
    ``D123`` or the bare number ``123``.  Complex queries can be built
    with the operators ``+``, ``-``, ``&``, ``(`` and ``)``; a leading
    ``:`` selects the whole stack a revision belongs to.

    The keywords ``abandoned``, ``accepted``, ``closed``, ``needsreview``
    and ``needsrevision`` filter the selection by review status.  For
    performance reasons they only refine an existing selection and cannot
    be used on their own.

    For example ``:D6+8-(2+D4)`` selects the stack up to D6, adds D8 and
    removes D2 and D4, while ``:D9 & needsreview`` picks the "Needs
    Review" revisions of the stack up to D9.

    With --stack, dependency information is followed and every patch in
    the stack is read; it is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the ':' prefix operator (pull in dependencies).
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1590
1590
1591
1591
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status flags are mutually exclusive on the Phabricator side: a
    # revision cannot e.g. be accepted and abandoned in one transaction.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        # The Conduit transaction API expects a real JSON boolean for these
        # action types, not the bytestring b'true', so pass the True
        # primitive.
        actions.append({b'type': f, b'value': True})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # Attach the optional comment only to the last revision of the batch.
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1628
1628
1629
1629
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # First preference: the Differential URL embedded in the changeset
    # description by phabsend.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group(r'url'), b'id': b"D%s" % match.group(r'id'),}
        )
    # Otherwise fall back to a local D* tag, composing the URL from the
    # configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        base = ctx.repo().ui.config(b'phabricator', b'url')
        if not base.endswith(b'/'):
            base += b'/'
        return templateutil.hybriddict({b'url': base + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now