##// END OF EJS Templates
phabricator: fix processing of tags/desc in getoldnodedrevmap()...
Denis Laxalde -
r44281:16b607e9 default
parent child Browse files
Show More
@@ -1,1650 +1,1653 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Shared extension helper: collects commands, config items and template
# keywords so they can be registered with Mercurial in one place.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Color labels used by phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra command-line flag appended to every vcr-capable command; lets tests
# record/replay HTTP traffic via a vcr transcript file.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
139
139
140
140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command that can optionally record/replay HTTP traffic.

    Behaves like ``@command`` but appends the ``--test-vcr`` flag. When the
    flag is supplied at run time, all HTTP requests made by the command are
    recorded to (or replayed from) the given vcr cassette file.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method and decoded body parameters
        # agree; JSON-valued parameters are compared structurally.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Strip the real conduit API token before it lands in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies are session-specific; keep them out of recorded fixtures.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does not tolerate demand-loaded modules; import it with
                # demandimport disabled.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped function's identity for help/registration.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
217
217
218
218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        # Pick an items() adaptor for lists/dicts; anything else is a leaf.
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
244
244
245
245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Look up [auth] credentials matching the configured URL.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274
274
275
275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))

    # The API token rides along inside the params blob, not as a header.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)

    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Delegate the HTTP round trip to an external curl process.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Built-in HTTP stack, honoring [auth] settings.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)

    # Recursively convert unicode strings in the decoded JSON back to
    # local (bytes) encoding, since the rest of hg works in bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
319
319
320
320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
344
344
345
345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    # Resolve the callsign to a PHID via the Conduit repository search API.
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the lookup in the in-memory config for subsequent calls.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
365
365
366
366
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
368 _differentialrevisiondescre = re.compile(
368 _differentialrevisiondescre = re.compile(
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
370 )
370 )
371
371
372
372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # BUGFIX: was ``continue``, which kept scanning tags
                        # and then fell through to the commit-message check,
                        # letting a desc match overwrite the tag result.
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a "D123" tag: fall back to the
            # "Differential Revision:" line in the commit message.
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag so it is not trusted again.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
463
466
464
467
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # Concatenate the raw diff chunks; the ui labels are irrelevant here.
    pieces = []
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        pieces.append(chunk)
    return b''.join(pieces)
473
476
474
477
class DiffChangeType(object):
    """Numeric codes for the kind of change a Differential diff records,
    mirroring Phabricator's DifferentialChangeType constants."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
484
487
485
488
class DiffFileType(object):
    """Numeric codes for the content type of a file in a Differential diff,
    mirroring Phabricator's DifferentialChangeType file-type constants."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
490
493
491
494
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Attribute names must stay camelCase: they are serialized verbatim
    # into the Conduit API payload.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
505
508
506
509
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # Attribute names must stay camelCase: they are serialized verbatim
    # into the Conduit API payload.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        # Duplicate every "new:" metadata key under the matching "old:" key.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        # Record the pre-change unix file mode.
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        # Record the post-change unix file mode.
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
546
549
547
550
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change-dict} mapping populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange on this diff, keyed by its currentPath

        Raises Abort when given anything other than a phabchange.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
575
578
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A very large context makes the hunks carry the surrounding file content
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, hunklines in fhunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        corpus = b''.join(hunklines[1:])

        # Prepend the header so diffstatsum() sees a complete diff
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(list(header) + hunklines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
604
607
605
608
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the PHID returned by the earlier ``file.allocate`` call;
    only chunks the server reports as incomplete are (re)sent, so partially
    finished uploads resume where they left off.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Read the file contents once: fctx.data() is not cached, and re-reading
    # the whole file from the filelog for every 4MiB chunk is wasteful for
    # exactly the large files that take this path.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
631
634
632
635
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the file PHID on success; aborts if no PHID was obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {b'name': fname, b'contentLength': size, b'contentHash': fhash},
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was allocated but content is missing: send it chunked
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
668
671
669
672
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents are identical, so reuse the new side's metadata.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
691
694
692
695
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # Mark images as such so the web UI can render them inline
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
705
708
706
709
# Copied from mercurial/patch.py
# Maps a file context's flags() value (b'l' symlink, b'x' executable,
# b'' regular) to the git-style mode string used in diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
709
712
710
713
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary

    Checks the parent revision's content too, when there is one, so a file
    whose old side is undecodable is also treated as binary.
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
726
729
727
730
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Text content is only attached for decodable, non-binary files
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
740
743
741
744
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        # Only record modes when they changed between the two revisions
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
760
763
761
764
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    Mutates ``removed`` in place: when an added file turns out to be the
    target of a move, its source is taken out of ``removed`` so addremoved()
    won't also report it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source disappeared: this is a move
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Already recorded as a move; a further target upgrades the
                # source to a multi-way copy
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # The *_AWAY records for copy/move sources are only complete once every
    # added file has been seen, so register them last.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
825
828
826
829
def creatediff(ctx):
    """create a Differential Diff

    Returns the dict produced by the "differential.creatediff" API call, or
    aborts when the call yields nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    if repophid:
        pdiff.repositoryPHID = repophid

    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)

    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
852
855
853
856
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }

    # Both properties are attached through the same Conduit endpoint
    for name, data in [(b'hg:meta', hgmeta), (b'local:commits', localcommits)]:
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )
890
893
891
894
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair: the edited revision dict and the
    diff (newly created, or ``olddiff`` when no re-upload was needed).
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of old and new nodes to decide whether
        # the patch content actually changed
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        # The comment is only attached when a new/updated diff is uploaded
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
961
964
962
965
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts if any of the (case-insensitive) names cannot be resolved.
    """
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
978
981
979
982
980 @vcrcommand(
983 @vcrcommand(
981 b'phabsend',
984 b'phabsend',
982 [
985 [
983 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
986 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
984 (b'', b'amend', True, _(b'update commit messages')),
987 (b'', b'amend', True, _(b'update commit messages')),
985 (b'', b'reviewer', [], _(b'specify reviewers')),
988 (b'', b'reviewer', [], _(b'specify reviewers')),
986 (b'', b'blocker', [], _(b'specify blocking reviewers')),
989 (b'', b'blocker', [], _(b'specify blocking reviewers')),
987 (
990 (
988 b'm',
991 b'm',
989 b'comment',
992 b'comment',
990 b'',
993 b'',
991 _(b'add a comment to Revisions with new/updated Diffs'),
994 _(b'add a comment to Revisions with new/updated Diffs'),
992 ),
995 ),
993 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
996 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
994 ],
997 ],
995 _(b'REV [OPTIONS]'),
998 _(b'REV [OPTIONS]'),
996 helpcategory=command.CATEGORY_IMPORT_EXPORT,
999 helpcategory=command.CATEGORY_IMPORT_EXPORT,
997 )
1000 )
998 def phabsend(ui, repo, *revs, **opts):
1001 def phabsend(ui, repo, *revs, **opts):
999 """upload changesets to Phabricator
1002 """upload changesets to Phabricator
1000
1003
1001 If there are multiple revisions specified, they will be send as a stack
1004 If there are multiple revisions specified, they will be send as a stack
1002 with a linear dependencies relationship using the order specified by the
1005 with a linear dependencies relationship using the order specified by the
1003 revset.
1006 revset.
1004
1007
1005 For the first time uploading changesets, local tags will be created to
1008 For the first time uploading changesets, local tags will be created to
1006 maintain the association. After the first time, phabsend will check
1009 maintain the association. After the first time, phabsend will check
1007 obsstore and tags information so it can figure out whether to update an
1010 obsstore and tags information so it can figure out whether to update an
1008 existing Differential Revision, or create a new one.
1011 existing Differential Revision, or create a new one.
1009
1012
1010 If --amend is set, update commit messages so they have the
1013 If --amend is set, update commit messages so they have the
1011 ``Differential Revision`` URL, remove related tags. This is similar to what
1014 ``Differential Revision`` URL, remove related tags. This is similar to what
1012 arcanist will do, and is more desired in author-push workflows. Otherwise,
1015 arcanist will do, and is more desired in author-push workflows. Otherwise,
1013 use local tags to record the ``Differential Revision`` association.
1016 use local tags to record the ``Differential Revision`` association.
1014
1017
1015 The --confirm option lets you confirm changesets before sending them. You
1018 The --confirm option lets you confirm changesets before sending them. You
1016 can also add following to your configuration file to make it default
1019 can also add following to your configuration file to make it default
1017 behaviour::
1020 behaviour::
1018
1021
1019 [phabsend]
1022 [phabsend]
1020 confirm = true
1023 confirm = true
1021
1024
1022 phabsend will check obsstore and the above association to decide whether to
1025 phabsend will check obsstore and the above association to decide whether to
1023 update an existing Differential Revision, or create a new one.
1026 update an existing Differential Revision, or create a new one.
1024 """
1027 """
1025 opts = pycompat.byteskwargs(opts)
1028 opts = pycompat.byteskwargs(opts)
1026 revs = list(revs) + opts.get(b'rev', [])
1029 revs = list(revs) + opts.get(b'rev', [])
1027 revs = scmutil.revrange(repo, revs)
1030 revs = scmutil.revrange(repo, revs)
1028
1031
1029 if not revs:
1032 if not revs:
1030 raise error.Abort(_(b'phabsend requires at least one changeset'))
1033 raise error.Abort(_(b'phabsend requires at least one changeset'))
1031 if opts.get(b'amend'):
1034 if opts.get(b'amend'):
1032 cmdutil.checkunfinished(repo)
1035 cmdutil.checkunfinished(repo)
1033
1036
1034 # {newnode: (oldnode, olddiff, olddrev}
1037 # {newnode: (oldnode, olddiff, olddrev}
1035 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1038 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1036
1039
1037 confirm = ui.configbool(b'phabsend', b'confirm')
1040 confirm = ui.configbool(b'phabsend', b'confirm')
1038 confirm |= bool(opts.get(b'confirm'))
1041 confirm |= bool(opts.get(b'confirm'))
1039 if confirm:
1042 if confirm:
1040 confirmed = _confirmbeforesend(repo, revs, oldmap)
1043 confirmed = _confirmbeforesend(repo, revs, oldmap)
1041 if not confirmed:
1044 if not confirmed:
1042 raise error.Abort(_(b'phabsend cancelled'))
1045 raise error.Abort(_(b'phabsend cancelled'))
1043
1046
1044 actions = []
1047 actions = []
1045 reviewers = opts.get(b'reviewer', [])
1048 reviewers = opts.get(b'reviewer', [])
1046 blockers = opts.get(b'blocker', [])
1049 blockers = opts.get(b'blocker', [])
1047 phids = []
1050 phids = []
1048 if reviewers:
1051 if reviewers:
1049 phids.extend(userphids(repo, reviewers))
1052 phids.extend(userphids(repo, reviewers))
1050 if blockers:
1053 if blockers:
1051 phids.extend(
1054 phids.extend(
1052 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1055 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1053 )
1056 )
1054 if phids:
1057 if phids:
1055 actions.append({b'type': b'reviewers.add', b'value': phids})
1058 actions.append({b'type': b'reviewers.add', b'value': phids})
1056
1059
1057 drevids = [] # [int]
1060 drevids = [] # [int]
1058 diffmap = {} # {newnode: diff}
1061 diffmap = {} # {newnode: diff}
1059
1062
1060 # Send patches one by one so we know their Differential Revision PHIDs and
1063 # Send patches one by one so we know their Differential Revision PHIDs and
1061 # can provide dependency relationship
1064 # can provide dependency relationship
1062 lastrevphid = None
1065 lastrevphid = None
1063 for rev in revs:
1066 for rev in revs:
1064 ui.debug(b'sending rev %d\n' % rev)
1067 ui.debug(b'sending rev %d\n' % rev)
1065 ctx = repo[rev]
1068 ctx = repo[rev]
1066
1069
1067 # Get Differential Revision ID
1070 # Get Differential Revision ID
1068 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1071 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1069 if oldnode != ctx.node() or opts.get(b'amend'):
1072 if oldnode != ctx.node() or opts.get(b'amend'):
1070 # Create or update Differential Revision
1073 # Create or update Differential Revision
1071 revision, diff = createdifferentialrevision(
1074 revision, diff = createdifferentialrevision(
1072 ctx,
1075 ctx,
1073 revid,
1076 revid,
1074 lastrevphid,
1077 lastrevphid,
1075 oldnode,
1078 oldnode,
1076 olddiff,
1079 olddiff,
1077 actions,
1080 actions,
1078 opts.get(b'comment'),
1081 opts.get(b'comment'),
1079 )
1082 )
1080 diffmap[ctx.node()] = diff
1083 diffmap[ctx.node()] = diff
1081 newrevid = int(revision[b'object'][b'id'])
1084 newrevid = int(revision[b'object'][b'id'])
1082 newrevphid = revision[b'object'][b'phid']
1085 newrevphid = revision[b'object'][b'phid']
1083 if revid:
1086 if revid:
1084 action = b'updated'
1087 action = b'updated'
1085 else:
1088 else:
1086 action = b'created'
1089 action = b'created'
1087
1090
1088 # Create a local tag to note the association, if commit message
1091 # Create a local tag to note the association, if commit message
1089 # does not have it already
1092 # does not have it already
1090 m = _differentialrevisiondescre.search(ctx.description())
1093 m = _differentialrevisiondescre.search(ctx.description())
1091 if not m or int(m.group('id')) != newrevid:
1094 if not m or int(m.group('id')) != newrevid:
1092 tagname = b'D%d' % newrevid
1095 tagname = b'D%d' % newrevid
1093 tags.tag(
1096 tags.tag(
1094 repo,
1097 repo,
1095 tagname,
1098 tagname,
1096 ctx.node(),
1099 ctx.node(),
1097 message=None,
1100 message=None,
1098 user=None,
1101 user=None,
1099 date=None,
1102 date=None,
1100 local=True,
1103 local=True,
1101 )
1104 )
1102 else:
1105 else:
1103 # Nothing changed. But still set "newrevphid" so the next revision
1106 # Nothing changed. But still set "newrevphid" so the next revision
1104 # could depend on this one and "newrevid" for the summary line.
1107 # could depend on this one and "newrevid" for the summary line.
1105 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1108 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1106 newrevid = revid
1109 newrevid = revid
1107 action = b'skipped'
1110 action = b'skipped'
1108
1111
1109 actiondesc = ui.label(
1112 actiondesc = ui.label(
1110 {
1113 {
1111 b'created': _(b'created'),
1114 b'created': _(b'created'),
1112 b'skipped': _(b'skipped'),
1115 b'skipped': _(b'skipped'),
1113 b'updated': _(b'updated'),
1116 b'updated': _(b'updated'),
1114 }[action],
1117 }[action],
1115 b'phabricator.action.%s' % action,
1118 b'phabricator.action.%s' % action,
1116 )
1119 )
1117 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1120 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1118 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1121 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1119 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1122 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1120 ui.write(
1123 ui.write(
1121 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1124 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1122 )
1125 )
1123 drevids.append(newrevid)
1126 drevids.append(newrevid)
1124 lastrevphid = newrevphid
1127 lastrevphid = newrevphid
1125
1128
1126 # Update commit messages and remove tags
1129 # Update commit messages and remove tags
1127 if opts.get(b'amend'):
1130 if opts.get(b'amend'):
1128 unfi = repo.unfiltered()
1131 unfi = repo.unfiltered()
1129 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1132 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1130 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1133 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1131 wnode = unfi[b'.'].node()
1134 wnode = unfi[b'.'].node()
1132 mapping = {} # {oldnode: [newnode]}
1135 mapping = {} # {oldnode: [newnode]}
1133 for i, rev in enumerate(revs):
1136 for i, rev in enumerate(revs):
1134 old = unfi[rev]
1137 old = unfi[rev]
1135 drevid = drevids[i]
1138 drevid = drevids[i]
1136 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1139 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1137 newdesc = getdescfromdrev(drev)
1140 newdesc = getdescfromdrev(drev)
1138 # Make sure commit message contain "Differential Revision"
1141 # Make sure commit message contain "Differential Revision"
1139 if old.description() != newdesc:
1142 if old.description() != newdesc:
1140 if old.phase() == phases.public:
1143 if old.phase() == phases.public:
1141 ui.warn(
1144 ui.warn(
1142 _(b"warning: not updating public commit %s\n")
1145 _(b"warning: not updating public commit %s\n")
1143 % scmutil.formatchangeid(old)
1146 % scmutil.formatchangeid(old)
1144 )
1147 )
1145 continue
1148 continue
1146 parents = [
1149 parents = [
1147 mapping.get(old.p1().node(), (old.p1(),))[0],
1150 mapping.get(old.p1().node(), (old.p1(),))[0],
1148 mapping.get(old.p2().node(), (old.p2(),))[0],
1151 mapping.get(old.p2().node(), (old.p2(),))[0],
1149 ]
1152 ]
1150 new = context.metadataonlyctx(
1153 new = context.metadataonlyctx(
1151 repo,
1154 repo,
1152 old,
1155 old,
1153 parents=parents,
1156 parents=parents,
1154 text=newdesc,
1157 text=newdesc,
1155 user=old.user(),
1158 user=old.user(),
1156 date=old.date(),
1159 date=old.date(),
1157 extra=old.extra(),
1160 extra=old.extra(),
1158 )
1161 )
1159
1162
1160 newnode = new.commit()
1163 newnode = new.commit()
1161
1164
1162 mapping[old.node()] = [newnode]
1165 mapping[old.node()] = [newnode]
1163 # Update diff property
1166 # Update diff property
1164 # If it fails just warn and keep going, otherwise the DREV
1167 # If it fails just warn and keep going, otherwise the DREV
1165 # associations will be lost
1168 # associations will be lost
1166 try:
1169 try:
1167 writediffproperties(unfi[newnode], diffmap[old.node()])
1170 writediffproperties(unfi[newnode], diffmap[old.node()])
1168 except util.urlerr.urlerror:
1171 except util.urlerr.urlerror:
1169 ui.warnnoi18n(
1172 ui.warnnoi18n(
1170 b'Failed to update metadata for D%d\n' % drevid
1173 b'Failed to update metadata for D%d\n' % drevid
1171 )
1174 )
1172 # Remove local tags since it's no longer necessary
1175 # Remove local tags since it's no longer necessary
1173 tagname = b'D%d' % drevid
1176 tagname = b'D%d' % drevid
1174 if tagname in repo.tags():
1177 if tagname in repo.tags():
1175 tags.tag(
1178 tags.tag(
1176 repo,
1179 repo,
1177 tagname,
1180 tagname,
1178 nullid,
1181 nullid,
1179 message=None,
1182 message=None,
1180 user=None,
1183 user=None,
1181 date=None,
1184 date=None,
1182 local=True,
1185 local=True,
1183 )
1186 )
1184 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1187 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1185 if wnode in mapping:
1188 if wnode in mapping:
1186 unfi.setparents(mapping[wnode][0])
1189 unfi.setparents(mapping[wnode][0])
1187
1190
1188
1191
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE: util.sortdict preserves insertion order, so iterating this mapping
# (see readpatch) emits headers in the conventional "hg export" order.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1200
1203
1201
1204
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and ask for confirmation.

    Returns True when the user confirms, False to abort the send.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            # No known Differential Revision yet: a new one would be created
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    # promptchoice() returns the index of the chosen answer; 0 means "Yes"
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
1229
1232
1230
1233
# Differential Revision status names accepted by the drev query language,
# matched against the normalized form produced by _getstatusname().
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
1238
1241
1239
1242
1240 def _getstatusname(drev):
1243 def _getstatusname(drev):
1241 """get normalized status name from a Differential Revision"""
1244 """get normalized status name from a Differential Revision"""
1242 return drev[b'statusName'].replace(b' ', b'').lower()
1245 return drev[b'statusName'].replace(b' ', b'').lower()
1243
1246
1244
1247
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Grammar table consumed by the generic parser in _parse(); each entry is the
# 5-tuple format that parser.parser() expects.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1259
1262
1260
1263
def _tokenize(text):
    """Yield (token-type, value, position) tuples for a drev query string.

    Symbols are maximal runs of non-special bytes; each special character is
    its own token (spaces are dropped).  A final (b'end', None, pos) token is
    always emitted.
    """
    buf = memoryview(text)  # slicing a memoryview avoids copying
    stopchars = b'():+-& '
    i = 0
    n = len(text)
    while i < n:
        word = b''.join(
            itertools.takewhile(
                lambda ch: ch not in stopchars, pycompat.iterbytestr(buf[i:])
            )
        )
        if word:
            yield (b'symbol', word, i)
            i += len(word)
        else:
            # single special character; plain spaces are simply skipped
            ch = text[i : i + 1]
            if ch != b' ':
                yield (ch, None, i)
            i += 1
    yield (b'end', None, i)
1280
1283
1281
1284
def _parse(text):
    """Parse a drev query string into an AST; abort on trailing garbage."""
    drevparser = parser.parser(_elements)
    tree, pos = drevparser.parse(_tokenize(text))
    if pos == len(text):
        return tree
    # the parser stopped before consuming all input: unparsable token
    raise error.ParseError(b'invalid token', pos)
1287
1290
1288
1291
1289 def _parsedrev(symbol):
1292 def _parsedrev(symbol):
1290 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1293 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1291 if symbol.startswith(b'D') and symbol[1:].isdigit():
1294 if symbol.startswith(b'D') and symbol[1:].isdigit():
1292 return int(symbol[1:])
1295 return int(symbol[1:])
1293 if symbol.isdigit():
1296 if symbol.isdigit():
1294 return int(symbol)
1297 return int(symbol)
1295
1298
1296
1299
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            singles.add(drev)
    elif op == b'ancestors':
        s, a = _prefetchdrevs(tree[1])
        singles.update(s)
        # everything named directly under ":" is also an ancestor root
        ancestors.update(s)
        ancestors.update(a)
    else:
        # binary operators and groups: just merge what the subtrees need
        for subtree in tree[1:]:
            s, a = _prefetchdrevs(subtree)
            singles.update(s)
            ancestors.update(a)
    return singles, ancestors
1317
1320
1318
1321
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # params carries either b'ids' or b'phids'; the first entry is the
        # cache key we are ultimately asked for
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result, keyed both by phid and numeric id
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # walk "phabricator:depends-on" edges depth-first, then reverse so
        # the result is ordered bottom (oldest dependency) to top
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor root we
    # optimistically fetch a window of batchsize ids below it, betting that
    # the stack's dependencies fall in that range.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status names filter the prefetched ids, they cannot stand
                # alone (validids only covers what other terms selected)
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # smartsets implement __and__/__add__/__sub__, dispatch via operator
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1441
1444
1442
1445
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # drop empty sections (e.g. an empty summary) before joining
    return b'\n\n'.join(p for p in parts if p)
1456
1459
1457
1460
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick a deterministic commit by sorting on the node hash (the
            # dict key).  Sorting the value dicts themselves, as this code
            # used to, raises TypeError on Python 3 whenever the diff covers
            # more than one commit, since dicts are not orderable.
            # NOTE(review): with multiple commits the "right" one to use is
            # ambiguous; the smallest hash at least keeps the choice stable.
            commits = props[b'local:commits']
            commit = commits[min(commits)]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc records an epoch timestamp without a zone; assume UTC
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to diff-level fields for anything still missing
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1524
1527
1525
1528
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs in one conduit round-trip
    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        # the highest diff id is the latest version of the revision
        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for key in _metanamemap.keys():
            if key in meta:
                headerlines.append(b'# %s %s\n' % (_metanamemap[key], meta[key]))

        write(b'%s%s\n%s' % (b''.join(headerlines), desc, body))
1557
1560
1558
1561
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is sugar for the ``:`` prefix operator of the DREVSPEC language.
    drevspec = b':(%s)' % spec if opts.get(b'stack') else spec
    drevs = querydrev(repo, drevspec)
    # Emit each selected revision as an importable patch on stdout.
    readpatch(repo, drevs, ui.write)
1589
1592
1590
1593
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The status-changing flags are mutually exclusive; abort if more than
    # one was selected on the command line.
    selected = [
        name
        for name in b'accept reject abandon reclaim'.split()
        if opts.get(name)
    ]
    if len(selected) > 1:
        raise error.Abort(
            _(b'%s cannot be used together') % b', '.join(selected)
        )

    # Each selected flag becomes one Conduit transaction.
    actions = [{b'type': name, b'value': True} for name in selected]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for idx, drev in enumerate(drevs):
        # An optional comment is attached only to the last revision.
        if idx == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if not actions:
            continue
        callconduit(
            ui,
            b'differential.revision.edit',
            {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            },
        )
1627
1630
1628
1631
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the "Differential Revision: <url>" line that phabsend
    # leaves in the commit message.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group('url'), b'id': b"D%s" % match.group('id'),}
        )
    # Fall back to a local D<number> tag, building the URL from the
    # configured Phabricator base URL.
    repo = ctx.repo()
    for tag in repo.nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        baseurl = repo.ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({b'url': baseurl + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now