##// END OF EJS Templates
phabricator: update hgmatcher to cope with the new data format...
Ian Moody
r43558:a4da1c3b default
parent child Browse files
Show More
@@ -1,1639 +1,1651 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
87 # leave the attribute unspecified.
87 # leave the attribute unspecified.
88 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
89
89
90 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
91
91
92 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
93 command = eh.command
93 command = eh.command
94 configtable = eh.configtable
94 configtable = eh.configtable
95 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
96
96
97 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
98 eh.configitem(
98 eh.configitem(
99 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
100 )
100 )
101 eh.configitem(
101 eh.configitem(
102 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
103 )
103 )
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
106 )
106 )
107 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
116 )
116 )
117
117
118 colortable = {
118 colortable = {
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
122 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
123 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
124 b'phabricator.node': b'',
124 b'phabricator.node': b'',
125 }
125 }
126
126
127 _VCR_FLAGS = [
127 _VCR_FLAGS = [
128 (
128 (
129 b'',
129 b'',
130 b'test-vcr',
130 b'test-vcr',
131 b'',
131 b'',
132 _(
132 _(
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
135 b' (ADVANCED)'
135 b' (ADVANCED)'
136 ),
136 ),
137 ),
137 ),
138 ]
138 ]
139
139
140
140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Like ``command``, but adds a hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is given, all HTTP traffic issued by the
    command is recorded to (or, if the file exists, replayed from) the
    vcr cassette at PATH.  Used by the test suite to avoid talking to a
    live Phabricator server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when they target the same URI and method and
        # carry equivalent form-encoded bodies (order-insensitive).
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        # NOTE(review): only r1's keys are checked against r2; extra keys in
        # r2 would not fail the match — confirm this asymmetry is intended.
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = json.loads(value)
                r2json = json.loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token before the request hits the transcript.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies so no session state leaks into the recorded cassette.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's own imports do not play well with demandimport.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own connection classes rather
                        # than the stdlib ones, since urlmod uses its own.
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve the wrapped command's identity for help/registration.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
205
217
206
218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def _flatten(prefix, value):
        if isinstance(value, bool):
            # PHP's http_build_query spells booleans out as strings.
            value = b'true' if value else b'false'
        # Exact-type dispatch (not isinstance) so dict/list subclasses are
        # treated as opaque scalar values, matching http_build_query.
        if type(value) is list:
            pairs = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            pairs = value.items()
        else:
            flatparams[prefix] = value
            return
        for key, item in pairs:
            _flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
232
244
233
245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.

    Raises ``error.Abort`` when either the URL or the token is missing.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authres = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authres:
        groupname, auth = authres
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
262
274
263
275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the conduit method (e.g. ``differential.querydiffs``).
    Raises ``error.Abort`` when the server reports an error_code.
    """
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Work on a copy: the token must not leak back into the caller's dict.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to curl, feeding the form body on stdin (-d @-).
        cin, cout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        cin.write(data)
        cin.close()
        body = cout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
307
319
308
320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """

    def _tolocal(obj):
        # json.loads only returns unicode strings; map them to local bytes.
        return pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            obj,
        )

    def _tounicode(obj):
        # json.dumps only accepts unicode strings; map bytes back out.
        return pycompat.rapply(
            lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
            obj,
        )

    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = _tolocal(json.loads(rawparams))
    result = _tounicode(callconduit(ui, name, params))
    rendered = json.dumps(
        result, sort_keys=True, indent=2, separators=(u',', u': ')
    )
    ui.write(b'%s\n' % encoding.unitolocal(rendered))
332
344
333
345
def getrepophid(repo):
    """given callsign, return repository PHID or None

    The result is cached back into the ``phabricator.repophid`` config so
    repeated calls avoid another conduit round-trip.
    """
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
353
365
354
366
355 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
356 _differentialrevisiondescre = re.compile(
368 _differentialrevisiondescre = re.compile(
357 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
358 )
370 )
359
371
360
372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this ``continue`` only advances the
                        # inner tag loop; the commit-message check below can
                        # still overwrite this entry with force=1 — confirm
                        # that is the intended precedence.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        # Extract the hg node a Differential diff was made from, if recorded.
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
451
463
452
464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    chunks = [
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
        )
    ]
    return b''.join(chunks)
461
473
462
474
class DiffChangeType(object):
    """Integer codes describing what happened to a file in a change.

    Used as the ``type`` field of a ``phabchange``; the numeric values are
    part of the Differential wire format and must not change.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
472
484
473
485
class DiffFileType(object):
    """Integer codes for a file's content kind in a Differential diff.

    Used as the ``fileType`` field of a ``phabchange``; values are part of
    the Differential wire format and must not change.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
478
490
479
491
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Attribute names use Phabricator's camelCase wire names — presumably so
    # attr.asdict() output can be sent to conduit unchanged (see
    # phabchange.addhunk); verify before renaming anything here.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
493
505
494
506
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # camelCase attribute names match Phabricator's wire format; do not
    # rename without checking how the containing diff is serialized.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate each ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the pre-change unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the post-change unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a ``phabhunk`` (as a bytes-keyed dict) and fold its line
        counts into this change's totals.

        Raises ``error.Abort`` for anything that is not a ``phabhunk``.
        """
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
534
546
535
547
536 @attr.s
548 @attr.s
537 class phabdiff(object):
549 class phabdiff(object):
538 """Represents a Differential diff, owns Differential changes. Corresponds
550 """Represents a Differential diff, owns Differential changes. Corresponds
539 to a commit.
551 to a commit.
540 """
552 """
541
553
542 # Doesn't seem to be any reason to send this (output of uname -n)
554 # Doesn't seem to be any reason to send this (output of uname -n)
543 sourceMachine = attr.ib(default=b'') # camelcase-required
555 sourceMachine = attr.ib(default=b'') # camelcase-required
544 sourcePath = attr.ib(default=b'/') # camelcase-required
556 sourcePath = attr.ib(default=b'/') # camelcase-required
545 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
557 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
546 sourceControlPath = attr.ib(default=b'/') # camelcase-required
558 sourceControlPath = attr.ib(default=b'/') # camelcase-required
547 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
559 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
548 branch = attr.ib(default=b'default')
560 branch = attr.ib(default=b'default')
549 bookmark = attr.ib(default=None)
561 bookmark = attr.ib(default=None)
550 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
562 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
551 lintStatus = attr.ib(default=b'none') # camelcase-required
563 lintStatus = attr.ib(default=b'none') # camelcase-required
552 unitStatus = attr.ib(default=b'none') # camelcase-required
564 unitStatus = attr.ib(default=b'none') # camelcase-required
553 changes = attr.ib(default=attr.Factory(dict))
565 changes = attr.ib(default=attr.Factory(dict))
554 repositoryPHID = attr.ib(default=None) # camelcase-required
566 repositoryPHID = attr.ib(default=None) # camelcase-required
555
567
556 def addchange(self, change):
568 def addchange(self, change):
557 if not isinstance(change, phabchange):
569 if not isinstance(change, phabchange):
558 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
570 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
559 self.changes[change.currentPath] = pycompat.byteskwargs(
571 self.changes[change.currentPath] = pycompat.byteskwargs(
560 attr.asdict(change)
572 attr.asdict(change)
561 )
573 )
562
574
563
575
564 def maketext(pchange, ctx, fname):
576 def maketext(pchange, ctx, fname):
565 """populate the phabchange for a text file"""
577 """populate the phabchange for a text file"""
566 repo = ctx.repo()
578 repo = ctx.repo()
567 fmatcher = match.exact([fname])
579 fmatcher = match.exact([fname])
568 diffopts = mdiff.diffopts(git=True, context=32767)
580 diffopts = mdiff.diffopts(git=True, context=32767)
569 _pfctx, _fctx, header, fhunks = next(
581 _pfctx, _fctx, header, fhunks = next(
570 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
582 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
571 )
583 )
572
584
573 for fhunk in fhunks:
585 for fhunk in fhunks:
574 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
586 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
575 corpus = b''.join(lines[1:])
587 corpus = b''.join(lines[1:])
576 shunk = list(header)
588 shunk = list(header)
577 shunk.extend(lines)
589 shunk.extend(lines)
578 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
590 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
579 patch.diffstatdata(util.iterlines(shunk))
591 patch.diffstatdata(util.iterlines(shunk))
580 )
592 )
581 pchange.addhunk(
593 pchange.addhunk(
582 phabhunk(
594 phabhunk(
583 oldOffset,
595 oldOffset,
584 oldLength,
596 oldLength,
585 newOffset,
597 newOffset,
586 newLength,
598 newLength,
587 corpus,
599 corpus,
588 addLines,
600 addLines,
589 delLines,
601 delLines,
590 )
602 )
591 )
603 )
592
604
593
605
594 def uploadchunks(fctx, fphid):
606 def uploadchunks(fctx, fphid):
595 """upload large binary files as separate chunks.
607 """upload large binary files as separate chunks.
596 Phab requests chunking over 8MiB, and splits into 4MiB chunks
608 Phab requests chunking over 8MiB, and splits into 4MiB chunks
597 """
609 """
598 ui = fctx.repo().ui
610 ui = fctx.repo().ui
599 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
611 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
600 progress = ui.makeprogress(
612 progress = ui.makeprogress(
601 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
613 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
602 )
614 )
603 for chunk in chunks:
615 for chunk in chunks:
604 progress.increment()
616 progress.increment()
605 if chunk[b'complete']:
617 if chunk[b'complete']:
606 continue
618 continue
607 bstart = int(chunk[b'byteStart'])
619 bstart = int(chunk[b'byteStart'])
608 bend = int(chunk[b'byteEnd'])
620 bend = int(chunk[b'byteEnd'])
609 callconduit(
621 callconduit(
610 ui,
622 ui,
611 b'file.uploadchunk',
623 b'file.uploadchunk',
612 {
624 {
613 b'filePHID': fphid,
625 b'filePHID': fphid,
614 b'byteStart': bstart,
626 b'byteStart': bstart,
615 b'data': base64.b64encode(fctx.data()[bstart:bend]),
627 b'data': base64.b64encode(fctx.data()[bstart:bend]),
616 b'dataEncoding': b'base64',
628 b'dataEncoding': b'base64',
617 },
629 },
618 )
630 )
619 progress.complete()
631 progress.complete()
620
632
621
633
622 def uploadfile(fctx):
634 def uploadfile(fctx):
623 """upload binary files to Phabricator"""
635 """upload binary files to Phabricator"""
624 repo = fctx.repo()
636 repo = fctx.repo()
625 ui = repo.ui
637 ui = repo.ui
626 fname = fctx.path()
638 fname = fctx.path()
627 size = fctx.size()
639 size = fctx.size()
628 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
640 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
629
641
630 # an allocate call is required first to see if an upload is even required
642 # an allocate call is required first to see if an upload is even required
631 # (Phab might already have it) and to determine if chunking is needed
643 # (Phab might already have it) and to determine if chunking is needed
632 allocateparams = {
644 allocateparams = {
633 b'name': fname,
645 b'name': fname,
634 b'contentLength': size,
646 b'contentLength': size,
635 b'contentHash': fhash,
647 b'contentHash': fhash,
636 }
648 }
637 filealloc = callconduit(ui, b'file.allocate', allocateparams)
649 filealloc = callconduit(ui, b'file.allocate', allocateparams)
638 fphid = filealloc[b'filePHID']
650 fphid = filealloc[b'filePHID']
639
651
640 if filealloc[b'upload']:
652 if filealloc[b'upload']:
641 ui.write(_(b'uploading %s\n') % bytes(fctx))
653 ui.write(_(b'uploading %s\n') % bytes(fctx))
642 if not fphid:
654 if not fphid:
643 uploadparams = {
655 uploadparams = {
644 b'name': fname,
656 b'name': fname,
645 b'data_base64': base64.b64encode(fctx.data()),
657 b'data_base64': base64.b64encode(fctx.data()),
646 }
658 }
647 fphid = callconduit(ui, b'file.upload', uploadparams)
659 fphid = callconduit(ui, b'file.upload', uploadparams)
648 else:
660 else:
649 uploadchunks(fctx, fphid)
661 uploadchunks(fctx, fphid)
650 else:
662 else:
651 ui.debug(b'server already has %s\n' % bytes(fctx))
663 ui.debug(b'server already has %s\n' % bytes(fctx))
652
664
653 if not fphid:
665 if not fphid:
654 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
666 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
655
667
656 return fphid
668 return fphid
657
669
658
670
659 def addoldbinary(pchange, fctx, originalfname):
671 def addoldbinary(pchange, fctx, originalfname):
660 """add the metadata for the previous version of a binary file to the
672 """add the metadata for the previous version of a binary file to the
661 phabchange for the new version
673 phabchange for the new version
662 """
674 """
663 oldfctx = fctx.p1()[originalfname]
675 oldfctx = fctx.p1()[originalfname]
664 if fctx.cmp(oldfctx):
676 if fctx.cmp(oldfctx):
665 # Files differ, add the old one
677 # Files differ, add the old one
666 pchange.metadata[b'old:file:size'] = oldfctx.size()
678 pchange.metadata[b'old:file:size'] = oldfctx.size()
667 mimeguess, _enc = mimetypes.guess_type(
679 mimeguess, _enc = mimetypes.guess_type(
668 encoding.unifromlocal(oldfctx.path())
680 encoding.unifromlocal(oldfctx.path())
669 )
681 )
670 if mimeguess:
682 if mimeguess:
671 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
683 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
672 mimeguess
684 mimeguess
673 )
685 )
674 fphid = uploadfile(oldfctx)
686 fphid = uploadfile(oldfctx)
675 pchange.metadata[b'old:binary-phid'] = fphid
687 pchange.metadata[b'old:binary-phid'] = fphid
676 else:
688 else:
677 # If it's left as IMAGE/BINARY web UI might try to display it
689 # If it's left as IMAGE/BINARY web UI might try to display it
678 pchange.fileType = DiffFileType.TEXT
690 pchange.fileType = DiffFileType.TEXT
679 pchange.copynewmetadatatoold()
691 pchange.copynewmetadatatoold()
680
692
681
693
682 def makebinary(pchange, fctx):
694 def makebinary(pchange, fctx):
683 """populate the phabchange for a binary file"""
695 """populate the phabchange for a binary file"""
684 pchange.fileType = DiffFileType.BINARY
696 pchange.fileType = DiffFileType.BINARY
685 fphid = uploadfile(fctx)
697 fphid = uploadfile(fctx)
686 pchange.metadata[b'new:binary-phid'] = fphid
698 pchange.metadata[b'new:binary-phid'] = fphid
687 pchange.metadata[b'new:file:size'] = fctx.size()
699 pchange.metadata[b'new:file:size'] = fctx.size()
688 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
700 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
689 if mimeguess:
701 if mimeguess:
690 mimeguess = pycompat.bytestr(mimeguess)
702 mimeguess = pycompat.bytestr(mimeguess)
691 pchange.metadata[b'new:file:mime-type'] = mimeguess
703 pchange.metadata[b'new:file:mime-type'] = mimeguess
692 if mimeguess.startswith(b'image/'):
704 if mimeguess.startswith(b'image/'):
693 pchange.fileType = DiffFileType.IMAGE
705 pchange.fileType = DiffFileType.IMAGE
694
706
695
707
696 # Copied from mercurial/patch.py
708 # Copied from mercurial/patch.py
697 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
709 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
698
710
699
711
700 def notutf8(fctx):
712 def notutf8(fctx):
701 """detect non-UTF-8 text files since Phabricator requires them to be marked
713 """detect non-UTF-8 text files since Phabricator requires them to be marked
702 as binary
714 as binary
703 """
715 """
704 try:
716 try:
705 fctx.data().decode('utf-8')
717 fctx.data().decode('utf-8')
706 if fctx.parents():
718 if fctx.parents():
707 fctx.p1().data().decode('utf-8')
719 fctx.p1().data().decode('utf-8')
708 return False
720 return False
709 except UnicodeDecodeError:
721 except UnicodeDecodeError:
710 fctx.repo().ui.write(
722 fctx.repo().ui.write(
711 _(b'file %s detected as non-UTF-8, marked as binary\n')
723 _(b'file %s detected as non-UTF-8, marked as binary\n')
712 % fctx.path()
724 % fctx.path()
713 )
725 )
714 return True
726 return True
715
727
716
728
717 def addremoved(pdiff, ctx, removed):
729 def addremoved(pdiff, ctx, removed):
718 """add removed files to the phabdiff. Shouldn't include moves"""
730 """add removed files to the phabdiff. Shouldn't include moves"""
719 for fname in removed:
731 for fname in removed:
720 pchange = phabchange(
732 pchange = phabchange(
721 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
733 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
722 )
734 )
723 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
735 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
724 fctx = ctx.p1()[fname]
736 fctx = ctx.p1()[fname]
725 if not (fctx.isbinary() or notutf8(fctx)):
737 if not (fctx.isbinary() or notutf8(fctx)):
726 maketext(pchange, ctx, fname)
738 maketext(pchange, ctx, fname)
727
739
728 pdiff.addchange(pchange)
740 pdiff.addchange(pchange)
729
741
730
742
731 def addmodified(pdiff, ctx, modified):
743 def addmodified(pdiff, ctx, modified):
732 """add modified files to the phabdiff"""
744 """add modified files to the phabdiff"""
733 for fname in modified:
745 for fname in modified:
734 fctx = ctx[fname]
746 fctx = ctx[fname]
735 pchange = phabchange(currentPath=fname, oldPath=fname)
747 pchange = phabchange(currentPath=fname, oldPath=fname)
736 filemode = gitmode[ctx[fname].flags()]
748 filemode = gitmode[ctx[fname].flags()]
737 originalmode = gitmode[ctx.p1()[fname].flags()]
749 originalmode = gitmode[ctx.p1()[fname].flags()]
738 if filemode != originalmode:
750 if filemode != originalmode:
739 pchange.addoldmode(originalmode)
751 pchange.addoldmode(originalmode)
740 pchange.addnewmode(filemode)
752 pchange.addnewmode(filemode)
741
753
742 if fctx.isbinary() or notutf8(fctx):
754 if fctx.isbinary() or notutf8(fctx):
743 makebinary(pchange, fctx)
755 makebinary(pchange, fctx)
744 addoldbinary(pchange, fctx, fname)
756 addoldbinary(pchange, fctx, fname)
745 else:
757 else:
746 maketext(pchange, ctx, fname)
758 maketext(pchange, ctx, fname)
747
759
748 pdiff.addchange(pchange)
760 pdiff.addchange(pchange)
749
761
750
762
751 def addadded(pdiff, ctx, added, removed):
763 def addadded(pdiff, ctx, added, removed):
752 """add file adds to the phabdiff, both new files and copies/moves"""
764 """add file adds to the phabdiff, both new files and copies/moves"""
753 # Keep track of files that've been recorded as moved/copied, so if there are
765 # Keep track of files that've been recorded as moved/copied, so if there are
754 # additional copies we can mark them (moves get removed from removed)
766 # additional copies we can mark them (moves get removed from removed)
755 copiedchanges = {}
767 copiedchanges = {}
756 movedchanges = {}
768 movedchanges = {}
757 for fname in added:
769 for fname in added:
758 fctx = ctx[fname]
770 fctx = ctx[fname]
759 pchange = phabchange(currentPath=fname)
771 pchange = phabchange(currentPath=fname)
760
772
761 filemode = gitmode[ctx[fname].flags()]
773 filemode = gitmode[ctx[fname].flags()]
762 renamed = fctx.renamed()
774 renamed = fctx.renamed()
763
775
764 if renamed:
776 if renamed:
765 originalfname = renamed[0]
777 originalfname = renamed[0]
766 originalmode = gitmode[ctx.p1()[originalfname].flags()]
778 originalmode = gitmode[ctx.p1()[originalfname].flags()]
767 pchange.oldPath = originalfname
779 pchange.oldPath = originalfname
768
780
769 if originalfname in removed:
781 if originalfname in removed:
770 origpchange = phabchange(
782 origpchange = phabchange(
771 currentPath=originalfname,
783 currentPath=originalfname,
772 oldPath=originalfname,
784 oldPath=originalfname,
773 type=DiffChangeType.MOVE_AWAY,
785 type=DiffChangeType.MOVE_AWAY,
774 awayPaths=[fname],
786 awayPaths=[fname],
775 )
787 )
776 movedchanges[originalfname] = origpchange
788 movedchanges[originalfname] = origpchange
777 removed.remove(originalfname)
789 removed.remove(originalfname)
778 pchange.type = DiffChangeType.MOVE_HERE
790 pchange.type = DiffChangeType.MOVE_HERE
779 elif originalfname in movedchanges:
791 elif originalfname in movedchanges:
780 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
792 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
781 movedchanges[originalfname].awayPaths.append(fname)
793 movedchanges[originalfname].awayPaths.append(fname)
782 pchange.type = DiffChangeType.COPY_HERE
794 pchange.type = DiffChangeType.COPY_HERE
783 else: # pure copy
795 else: # pure copy
784 if originalfname not in copiedchanges:
796 if originalfname not in copiedchanges:
785 origpchange = phabchange(
797 origpchange = phabchange(
786 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
798 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
787 )
799 )
788 copiedchanges[originalfname] = origpchange
800 copiedchanges[originalfname] = origpchange
789 else:
801 else:
790 origpchange = copiedchanges[originalfname]
802 origpchange = copiedchanges[originalfname]
791 origpchange.awayPaths.append(fname)
803 origpchange.awayPaths.append(fname)
792 pchange.type = DiffChangeType.COPY_HERE
804 pchange.type = DiffChangeType.COPY_HERE
793
805
794 if filemode != originalmode:
806 if filemode != originalmode:
795 pchange.addoldmode(originalmode)
807 pchange.addoldmode(originalmode)
796 pchange.addnewmode(filemode)
808 pchange.addnewmode(filemode)
797 else: # Brand-new file
809 else: # Brand-new file
798 pchange.addnewmode(gitmode[fctx.flags()])
810 pchange.addnewmode(gitmode[fctx.flags()])
799 pchange.type = DiffChangeType.ADD
811 pchange.type = DiffChangeType.ADD
800
812
801 if fctx.isbinary() or notutf8(fctx):
813 if fctx.isbinary() or notutf8(fctx):
802 makebinary(pchange, fctx)
814 makebinary(pchange, fctx)
803 if renamed:
815 if renamed:
804 addoldbinary(pchange, fctx, originalfname)
816 addoldbinary(pchange, fctx, originalfname)
805 else:
817 else:
806 maketext(pchange, ctx, fname)
818 maketext(pchange, ctx, fname)
807
819
808 pdiff.addchange(pchange)
820 pdiff.addchange(pchange)
809
821
810 for _path, copiedchange in copiedchanges.items():
822 for _path, copiedchange in copiedchanges.items():
811 pdiff.addchange(copiedchange)
823 pdiff.addchange(copiedchange)
812 for _path, movedchange in movedchanges.items():
824 for _path, movedchange in movedchanges.items():
813 pdiff.addchange(movedchange)
825 pdiff.addchange(movedchange)
814
826
815
827
816 def creatediff(ctx):
828 def creatediff(ctx):
817 """create a Differential Diff"""
829 """create a Differential Diff"""
818 repo = ctx.repo()
830 repo = ctx.repo()
819 repophid = getrepophid(repo)
831 repophid = getrepophid(repo)
820 # Create a "Differential Diff" via "differential.creatediff" API
832 # Create a "Differential Diff" via "differential.creatediff" API
821 pdiff = phabdiff(
833 pdiff = phabdiff(
822 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
834 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
823 branch=b'%s' % ctx.branch(),
835 branch=b'%s' % ctx.branch(),
824 )
836 )
825 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
837 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
826 # addadded will remove moved files from removed, so addremoved won't get
838 # addadded will remove moved files from removed, so addremoved won't get
827 # them
839 # them
828 addadded(pdiff, ctx, added, removed)
840 addadded(pdiff, ctx, added, removed)
829 addmodified(pdiff, ctx, modified)
841 addmodified(pdiff, ctx, modified)
830 addremoved(pdiff, ctx, removed)
842 addremoved(pdiff, ctx, removed)
831 if repophid:
843 if repophid:
832 pdiff.repositoryPHID = repophid
844 pdiff.repositoryPHID = repophid
833 diff = callconduit(
845 diff = callconduit(
834 repo.ui,
846 repo.ui,
835 b'differential.creatediff',
847 b'differential.creatediff',
836 pycompat.byteskwargs(attr.asdict(pdiff)),
848 pycompat.byteskwargs(attr.asdict(pdiff)),
837 )
849 )
838 if not diff:
850 if not diff:
839 raise error.Abort(_(b'cannot create diff for %s') % ctx)
851 raise error.Abort(_(b'cannot create diff for %s') % ctx)
840 return diff
852 return diff
841
853
842
854
843 def writediffproperties(ctx, diff):
855 def writediffproperties(ctx, diff):
844 """write metadata to diff so patches could be applied losslessly"""
856 """write metadata to diff so patches could be applied losslessly"""
845 # creatediff returns with a diffid but query returns with an id
857 # creatediff returns with a diffid but query returns with an id
846 diffid = diff.get(b'diffid', diff.get(b'id'))
858 diffid = diff.get(b'diffid', diff.get(b'id'))
847 params = {
859 params = {
848 b'diff_id': diffid,
860 b'diff_id': diffid,
849 b'name': b'hg:meta',
861 b'name': b'hg:meta',
850 b'data': templatefilters.json(
862 b'data': templatefilters.json(
851 {
863 {
852 b'user': ctx.user(),
864 b'user': ctx.user(),
853 b'date': b'%d %d' % ctx.date(),
865 b'date': b'%d %d' % ctx.date(),
854 b'branch': ctx.branch(),
866 b'branch': ctx.branch(),
855 b'node': ctx.hex(),
867 b'node': ctx.hex(),
856 b'parent': ctx.p1().hex(),
868 b'parent': ctx.p1().hex(),
857 }
869 }
858 ),
870 ),
859 }
871 }
860 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
872 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
861
873
862 params = {
874 params = {
863 b'diff_id': diffid,
875 b'diff_id': diffid,
864 b'name': b'local:commits',
876 b'name': b'local:commits',
865 b'data': templatefilters.json(
877 b'data': templatefilters.json(
866 {
878 {
867 ctx.hex(): {
879 ctx.hex(): {
868 b'author': stringutil.person(ctx.user()),
880 b'author': stringutil.person(ctx.user()),
869 b'authorEmail': stringutil.email(ctx.user()),
881 b'authorEmail': stringutil.email(ctx.user()),
870 b'time': int(ctx.date()[0]),
882 b'time': int(ctx.date()[0]),
871 b'commit': ctx.hex(),
883 b'commit': ctx.hex(),
872 b'parents': [ctx.p1().hex()],
884 b'parents': [ctx.p1().hex()],
873 b'branch': ctx.branch(),
885 b'branch': ctx.branch(),
874 },
886 },
875 }
887 }
876 ),
888 ),
877 }
889 }
878 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
890 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
879
891
880
892
881 def createdifferentialrevision(
893 def createdifferentialrevision(
882 ctx,
894 ctx,
883 revid=None,
895 revid=None,
884 parentrevphid=None,
896 parentrevphid=None,
885 oldnode=None,
897 oldnode=None,
886 olddiff=None,
898 olddiff=None,
887 actions=None,
899 actions=None,
888 comment=None,
900 comment=None,
889 ):
901 ):
890 """create or update a Differential Revision
902 """create or update a Differential Revision
891
903
892 If revid is None, create a new Differential Revision, otherwise update
904 If revid is None, create a new Differential Revision, otherwise update
893 revid. If parentrevphid is not None, set it as a dependency.
905 revid. If parentrevphid is not None, set it as a dependency.
894
906
895 If oldnode is not None, check if the patch content (without commit message
907 If oldnode is not None, check if the patch content (without commit message
896 and metadata) has changed before creating another diff.
908 and metadata) has changed before creating another diff.
897
909
898 If actions is not None, they will be appended to the transaction.
910 If actions is not None, they will be appended to the transaction.
899 """
911 """
900 repo = ctx.repo()
912 repo = ctx.repo()
901 if oldnode:
913 if oldnode:
902 diffopts = mdiff.diffopts(git=True, context=32767)
914 diffopts = mdiff.diffopts(git=True, context=32767)
903 oldctx = repo.unfiltered()[oldnode]
915 oldctx = repo.unfiltered()[oldnode]
904 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
916 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
905 else:
917 else:
906 neednewdiff = True
918 neednewdiff = True
907
919
908 transactions = []
920 transactions = []
909 if neednewdiff:
921 if neednewdiff:
910 diff = creatediff(ctx)
922 diff = creatediff(ctx)
911 transactions.append({b'type': b'update', b'value': diff[b'phid']})
923 transactions.append({b'type': b'update', b'value': diff[b'phid']})
912 if comment:
924 if comment:
913 transactions.append({b'type': b'comment', b'value': comment})
925 transactions.append({b'type': b'comment', b'value': comment})
914 else:
926 else:
915 # Even if we don't need to upload a new diff because the patch content
927 # Even if we don't need to upload a new diff because the patch content
916 # does not change. We might still need to update its metadata so
928 # does not change. We might still need to update its metadata so
917 # pushers could know the correct node metadata.
929 # pushers could know the correct node metadata.
918 assert olddiff
930 assert olddiff
919 diff = olddiff
931 diff = olddiff
920 writediffproperties(ctx, diff)
932 writediffproperties(ctx, diff)
921
933
922 # Set the parent Revision every time, so commit re-ordering is picked-up
934 # Set the parent Revision every time, so commit re-ordering is picked-up
923 if parentrevphid:
935 if parentrevphid:
924 transactions.append(
936 transactions.append(
925 {b'type': b'parents.set', b'value': [parentrevphid]}
937 {b'type': b'parents.set', b'value': [parentrevphid]}
926 )
938 )
927
939
928 if actions:
940 if actions:
929 transactions += actions
941 transactions += actions
930
942
931 # Parse commit message and update related fields.
943 # Parse commit message and update related fields.
932 desc = ctx.description()
944 desc = ctx.description()
933 info = callconduit(
945 info = callconduit(
934 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
946 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
935 )
947 )
936 for k, v in info[b'fields'].items():
948 for k, v in info[b'fields'].items():
937 if k in [b'title', b'summary', b'testPlan']:
949 if k in [b'title', b'summary', b'testPlan']:
938 transactions.append({b'type': k, b'value': v})
950 transactions.append({b'type': k, b'value': v})
939
951
940 params = {b'transactions': transactions}
952 params = {b'transactions': transactions}
941 if revid is not None:
953 if revid is not None:
942 # Update an existing Differential Revision
954 # Update an existing Differential Revision
943 params[b'objectIdentifier'] = revid
955 params[b'objectIdentifier'] = revid
944
956
945 revision = callconduit(repo.ui, b'differential.revision.edit', params)
957 revision = callconduit(repo.ui, b'differential.revision.edit', params)
946 if not revision:
958 if not revision:
947 raise error.Abort(_(b'cannot create revision for %s') % ctx)
959 raise error.Abort(_(b'cannot create revision for %s') % ctx)
948
960
949 return revision, diff
961 return revision, diff
950
962
951
963
952 def userphids(repo, names):
964 def userphids(repo, names):
953 """convert user names to PHIDs"""
965 """convert user names to PHIDs"""
954 names = [name.lower() for name in names]
966 names = [name.lower() for name in names]
955 query = {b'constraints': {b'usernames': names}}
967 query = {b'constraints': {b'usernames': names}}
956 result = callconduit(repo.ui, b'user.search', query)
968 result = callconduit(repo.ui, b'user.search', query)
957 # username not found is not an error of the API. So check if we have missed
969 # username not found is not an error of the API. So check if we have missed
958 # some names here.
970 # some names here.
959 data = result[b'data']
971 data = result[b'data']
960 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
972 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
961 unresolved = set(names) - resolved
973 unresolved = set(names) - resolved
962 if unresolved:
974 if unresolved:
963 raise error.Abort(
975 raise error.Abort(
964 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
976 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
965 )
977 )
966 return [entry[b'phid'] for entry in data]
978 return [entry[b'phid'] for entry in data]
967
979
968
980
969 @vcrcommand(
981 @vcrcommand(
970 b'phabsend',
982 b'phabsend',
971 [
983 [
972 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
984 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
973 (b'', b'amend', True, _(b'update commit messages')),
985 (b'', b'amend', True, _(b'update commit messages')),
974 (b'', b'reviewer', [], _(b'specify reviewers')),
986 (b'', b'reviewer', [], _(b'specify reviewers')),
975 (b'', b'blocker', [], _(b'specify blocking reviewers')),
987 (b'', b'blocker', [], _(b'specify blocking reviewers')),
976 (
988 (
977 b'm',
989 b'm',
978 b'comment',
990 b'comment',
979 b'',
991 b'',
980 _(b'add a comment to Revisions with new/updated Diffs'),
992 _(b'add a comment to Revisions with new/updated Diffs'),
981 ),
993 ),
982 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
994 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
983 ],
995 ],
984 _(b'REV [OPTIONS]'),
996 _(b'REV [OPTIONS]'),
985 helpcategory=command.CATEGORY_IMPORT_EXPORT,
997 helpcategory=command.CATEGORY_IMPORT_EXPORT,
986 )
998 )
987 def phabsend(ui, repo, *revs, **opts):
999 def phabsend(ui, repo, *revs, **opts):
988 """upload changesets to Phabricator
1000 """upload changesets to Phabricator
989
1001
990 If there are multiple revisions specified, they will be send as a stack
1002 If there are multiple revisions specified, they will be send as a stack
991 with a linear dependencies relationship using the order specified by the
1003 with a linear dependencies relationship using the order specified by the
992 revset.
1004 revset.
993
1005
994 For the first time uploading changesets, local tags will be created to
1006 For the first time uploading changesets, local tags will be created to
995 maintain the association. After the first time, phabsend will check
1007 maintain the association. After the first time, phabsend will check
996 obsstore and tags information so it can figure out whether to update an
1008 obsstore and tags information so it can figure out whether to update an
997 existing Differential Revision, or create a new one.
1009 existing Differential Revision, or create a new one.
998
1010
999 If --amend is set, update commit messages so they have the
1011 If --amend is set, update commit messages so they have the
1000 ``Differential Revision`` URL, remove related tags. This is similar to what
1012 ``Differential Revision`` URL, remove related tags. This is similar to what
1001 arcanist will do, and is more desired in author-push workflows. Otherwise,
1013 arcanist will do, and is more desired in author-push workflows. Otherwise,
1002 use local tags to record the ``Differential Revision`` association.
1014 use local tags to record the ``Differential Revision`` association.
1003
1015
1004 The --confirm option lets you confirm changesets before sending them. You
1016 The --confirm option lets you confirm changesets before sending them. You
1005 can also add following to your configuration file to make it default
1017 can also add following to your configuration file to make it default
1006 behaviour::
1018 behaviour::
1007
1019
1008 [phabsend]
1020 [phabsend]
1009 confirm = true
1021 confirm = true
1010
1022
1011 phabsend will check obsstore and the above association to decide whether to
1023 phabsend will check obsstore and the above association to decide whether to
1012 update an existing Differential Revision, or create a new one.
1024 update an existing Differential Revision, or create a new one.
1013 """
1025 """
1014 opts = pycompat.byteskwargs(opts)
1026 opts = pycompat.byteskwargs(opts)
1015 revs = list(revs) + opts.get(b'rev', [])
1027 revs = list(revs) + opts.get(b'rev', [])
1016 revs = scmutil.revrange(repo, revs)
1028 revs = scmutil.revrange(repo, revs)
1017
1029
1018 if not revs:
1030 if not revs:
1019 raise error.Abort(_(b'phabsend requires at least one changeset'))
1031 raise error.Abort(_(b'phabsend requires at least one changeset'))
1020 if opts.get(b'amend'):
1032 if opts.get(b'amend'):
1021 cmdutil.checkunfinished(repo)
1033 cmdutil.checkunfinished(repo)
1022
1034
1023 # {newnode: (oldnode, olddiff, olddrev}
1035 # {newnode: (oldnode, olddiff, olddrev}
1024 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1036 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1025
1037
1026 confirm = ui.configbool(b'phabsend', b'confirm')
1038 confirm = ui.configbool(b'phabsend', b'confirm')
1027 confirm |= bool(opts.get(b'confirm'))
1039 confirm |= bool(opts.get(b'confirm'))
1028 if confirm:
1040 if confirm:
1029 confirmed = _confirmbeforesend(repo, revs, oldmap)
1041 confirmed = _confirmbeforesend(repo, revs, oldmap)
1030 if not confirmed:
1042 if not confirmed:
1031 raise error.Abort(_(b'phabsend cancelled'))
1043 raise error.Abort(_(b'phabsend cancelled'))
1032
1044
1033 actions = []
1045 actions = []
1034 reviewers = opts.get(b'reviewer', [])
1046 reviewers = opts.get(b'reviewer', [])
1035 blockers = opts.get(b'blocker', [])
1047 blockers = opts.get(b'blocker', [])
1036 phids = []
1048 phids = []
1037 if reviewers:
1049 if reviewers:
1038 phids.extend(userphids(repo, reviewers))
1050 phids.extend(userphids(repo, reviewers))
1039 if blockers:
1051 if blockers:
1040 phids.extend(
1052 phids.extend(
1041 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1053 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1042 )
1054 )
1043 if phids:
1055 if phids:
1044 actions.append({b'type': b'reviewers.add', b'value': phids})
1056 actions.append({b'type': b'reviewers.add', b'value': phids})
1045
1057
1046 drevids = [] # [int]
1058 drevids = [] # [int]
1047 diffmap = {} # {newnode: diff}
1059 diffmap = {} # {newnode: diff}
1048
1060
1049 # Send patches one by one so we know their Differential Revision PHIDs and
1061 # Send patches one by one so we know their Differential Revision PHIDs and
1050 # can provide dependency relationship
1062 # can provide dependency relationship
1051 lastrevphid = None
1063 lastrevphid = None
1052 for rev in revs:
1064 for rev in revs:
1053 ui.debug(b'sending rev %d\n' % rev)
1065 ui.debug(b'sending rev %d\n' % rev)
1054 ctx = repo[rev]
1066 ctx = repo[rev]
1055
1067
1056 # Get Differential Revision ID
1068 # Get Differential Revision ID
1057 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1069 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1058 if oldnode != ctx.node() or opts.get(b'amend'):
1070 if oldnode != ctx.node() or opts.get(b'amend'):
1059 # Create or update Differential Revision
1071 # Create or update Differential Revision
1060 revision, diff = createdifferentialrevision(
1072 revision, diff = createdifferentialrevision(
1061 ctx,
1073 ctx,
1062 revid,
1074 revid,
1063 lastrevphid,
1075 lastrevphid,
1064 oldnode,
1076 oldnode,
1065 olddiff,
1077 olddiff,
1066 actions,
1078 actions,
1067 opts.get(b'comment'),
1079 opts.get(b'comment'),
1068 )
1080 )
1069 diffmap[ctx.node()] = diff
1081 diffmap[ctx.node()] = diff
1070 newrevid = int(revision[b'object'][b'id'])
1082 newrevid = int(revision[b'object'][b'id'])
1071 newrevphid = revision[b'object'][b'phid']
1083 newrevphid = revision[b'object'][b'phid']
1072 if revid:
1084 if revid:
1073 action = b'updated'
1085 action = b'updated'
1074 else:
1086 else:
1075 action = b'created'
1087 action = b'created'
1076
1088
1077 # Create a local tag to note the association, if commit message
1089 # Create a local tag to note the association, if commit message
1078 # does not have it already
1090 # does not have it already
1079 m = _differentialrevisiondescre.search(ctx.description())
1091 m = _differentialrevisiondescre.search(ctx.description())
1080 if not m or int(m.group(r'id')) != newrevid:
1092 if not m or int(m.group(r'id')) != newrevid:
1081 tagname = b'D%d' % newrevid
1093 tagname = b'D%d' % newrevid
1082 tags.tag(
1094 tags.tag(
1083 repo,
1095 repo,
1084 tagname,
1096 tagname,
1085 ctx.node(),
1097 ctx.node(),
1086 message=None,
1098 message=None,
1087 user=None,
1099 user=None,
1088 date=None,
1100 date=None,
1089 local=True,
1101 local=True,
1090 )
1102 )
1091 else:
1103 else:
1092 # Nothing changed. But still set "newrevphid" so the next revision
1104 # Nothing changed. But still set "newrevphid" so the next revision
1093 # could depend on this one and "newrevid" for the summary line.
1105 # could depend on this one and "newrevid" for the summary line.
1094 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1106 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1095 newrevid = revid
1107 newrevid = revid
1096 action = b'skipped'
1108 action = b'skipped'
1097
1109
1098 actiondesc = ui.label(
1110 actiondesc = ui.label(
1099 {
1111 {
1100 b'created': _(b'created'),
1112 b'created': _(b'created'),
1101 b'skipped': _(b'skipped'),
1113 b'skipped': _(b'skipped'),
1102 b'updated': _(b'updated'),
1114 b'updated': _(b'updated'),
1103 }[action],
1115 }[action],
1104 b'phabricator.action.%s' % action,
1116 b'phabricator.action.%s' % action,
1105 )
1117 )
1106 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1118 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1107 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1119 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1108 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1120 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1109 ui.write(
1121 ui.write(
1110 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1122 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1111 )
1123 )
1112 drevids.append(newrevid)
1124 drevids.append(newrevid)
1113 lastrevphid = newrevphid
1125 lastrevphid = newrevphid
1114
1126
1115 # Update commit messages and remove tags
1127 # Update commit messages and remove tags
1116 if opts.get(b'amend'):
1128 if opts.get(b'amend'):
1117 unfi = repo.unfiltered()
1129 unfi = repo.unfiltered()
1118 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1130 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1119 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1131 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1120 wnode = unfi[b'.'].node()
1132 wnode = unfi[b'.'].node()
1121 mapping = {} # {oldnode: [newnode]}
1133 mapping = {} # {oldnode: [newnode]}
1122 for i, rev in enumerate(revs):
1134 for i, rev in enumerate(revs):
1123 old = unfi[rev]
1135 old = unfi[rev]
1124 drevid = drevids[i]
1136 drevid = drevids[i]
1125 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1137 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1126 newdesc = getdescfromdrev(drev)
1138 newdesc = getdescfromdrev(drev)
1127 # Make sure commit message contain "Differential Revision"
1139 # Make sure commit message contain "Differential Revision"
1128 if old.description() != newdesc:
1140 if old.description() != newdesc:
1129 if old.phase() == phases.public:
1141 if old.phase() == phases.public:
1130 ui.warn(
1142 ui.warn(
1131 _(b"warning: not updating public commit %s\n")
1143 _(b"warning: not updating public commit %s\n")
1132 % scmutil.formatchangeid(old)
1144 % scmutil.formatchangeid(old)
1133 )
1145 )
1134 continue
1146 continue
1135 parents = [
1147 parents = [
1136 mapping.get(old.p1().node(), (old.p1(),))[0],
1148 mapping.get(old.p1().node(), (old.p1(),))[0],
1137 mapping.get(old.p2().node(), (old.p2(),))[0],
1149 mapping.get(old.p2().node(), (old.p2(),))[0],
1138 ]
1150 ]
1139 new = context.metadataonlyctx(
1151 new = context.metadataonlyctx(
1140 repo,
1152 repo,
1141 old,
1153 old,
1142 parents=parents,
1154 parents=parents,
1143 text=newdesc,
1155 text=newdesc,
1144 user=old.user(),
1156 user=old.user(),
1145 date=old.date(),
1157 date=old.date(),
1146 extra=old.extra(),
1158 extra=old.extra(),
1147 )
1159 )
1148
1160
1149 newnode = new.commit()
1161 newnode = new.commit()
1150
1162
1151 mapping[old.node()] = [newnode]
1163 mapping[old.node()] = [newnode]
1152 # Update diff property
1164 # Update diff property
1153 # If it fails just warn and keep going, otherwise the DREV
1165 # If it fails just warn and keep going, otherwise the DREV
1154 # associations will be lost
1166 # associations will be lost
1155 try:
1167 try:
1156 writediffproperties(unfi[newnode], diffmap[old.node()])
1168 writediffproperties(unfi[newnode], diffmap[old.node()])
1157 except util.urlerr.urlerror:
1169 except util.urlerr.urlerror:
1158 ui.warnnoi18n(
1170 ui.warnnoi18n(
1159 b'Failed to update metadata for D%s\n' % drevid
1171 b'Failed to update metadata for D%s\n' % drevid
1160 )
1172 )
1161 # Remove local tags since it's no longer necessary
1173 # Remove local tags since it's no longer necessary
1162 tagname = b'D%d' % drevid
1174 tagname = b'D%d' % drevid
1163 if tagname in repo.tags():
1175 if tagname in repo.tags():
1164 tags.tag(
1176 tags.tag(
1165 repo,
1177 repo,
1166 tagname,
1178 tagname,
1167 nullid,
1179 nullid,
1168 message=None,
1180 message=None,
1169 user=None,
1181 user=None,
1170 date=None,
1182 date=None,
1171 local=True,
1183 local=True,
1172 )
1184 )
1173 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1185 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1174 if wnode in mapping:
1186 if wnode in mapping:
1175 unfi.setparents(mapping[wnode][0])
1187 unfi.setparents(mapping[wnode][0])
1176
1188
1177
1189
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # trailing space is intentional: the emitted header is "# Parent <node>"
        (b'parent', b'Parent '),
    ]
)
1189
1201
1190
1202
def _confirmbeforesend(repo, revs, oldmap):
    """Show the revisions about to be sent and ask the user to confirm.

    ``oldmap`` maps a node to ``(oldnode, olddiff, drevid)`` as produced by
    ``getoldnodedrevmap``; a known ``drevid`` is shown as ``D<id>``, otherwise
    the revision is labelled ``NEW``.  Returns False when the user declines.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        summary = ui.label(
            ctx.description().splitlines()[0], b'phabricator.desc'
        )
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, summary))

    # promptchoice returns the 0-based index of the chosen option, so
    # "Yes" (index 0) maps to True here.
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1218
1230
1219
1231
1220 _knownstatusnames = {
1232 _knownstatusnames = {
1221 b'accepted',
1233 b'accepted',
1222 b'needsreview',
1234 b'needsreview',
1223 b'needsrevision',
1235 b'needsrevision',
1224 b'closed',
1236 b'closed',
1225 b'abandoned',
1237 b'abandoned',
1226 }
1238 }
1227
1239
1228
1240
1229 def _getstatusname(drev):
1241 def _getstatusname(drev):
1230 """get normalized status name from a Differential Revision"""
1242 """get normalized status name from a Differential Revision"""
1231 return drev[b'statusName'].replace(b' ', b'').lower()
1243 return drev[b'statusName'].replace(b' ', b'').lower()
1232
1244
1233
1245
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),  # grouping, until ')'
    b':': (8, None, (b'ancestors', 8), None, None),  # prefix: select a stack
    b'&': (5, None, None, (b'and_', 5), None),  # infix: intersection
    b'+': (4, None, None, (b'add', 4), None),  # infix: union
    b'-': (4, None, None, (b'sub', 4), None),  # infix: difference
    b')': (0, None, None, None, None),  # closes a '(' group
    b'symbol': (0, b'symbol', None, None, None),  # drev id or status name
    b'end': (0, None, None, None, None),  # end-of-input marker
}
1248
1260
1249
1261
1250 def _tokenize(text):
1262 def _tokenize(text):
1251 view = memoryview(text) # zero-copy slice
1263 view = memoryview(text) # zero-copy slice
1252 special = b'():+-& '
1264 special = b'():+-& '
1253 pos = 0
1265 pos = 0
1254 length = len(text)
1266 length = len(text)
1255 while pos < length:
1267 while pos < length:
1256 symbol = b''.join(
1268 symbol = b''.join(
1257 itertools.takewhile(
1269 itertools.takewhile(
1258 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1270 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1259 )
1271 )
1260 )
1272 )
1261 if symbol:
1273 if symbol:
1262 yield (b'symbol', symbol, pos)
1274 yield (b'symbol', symbol, pos)
1263 pos += len(symbol)
1275 pos += len(symbol)
1264 else: # special char, ignore space
1276 else: # special char, ignore space
1265 if text[pos] != b' ':
1277 if text[pos] != b' ':
1266 yield (text[pos], None, pos)
1278 yield (text[pos], None, pos)
1267 pos += 1
1279 pos += 1
1268 yield (b'end', None, pos)
1280 yield (b'end', None, pos)
1269
1281
1270
1282
def _parse(text):
    """Parse a drev spec into a tree; abort on trailing garbage."""
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos != len(text):
        # the parser stopped before consuming all input
        raise error.ParseError(b'invalid token', pos)
    return tree
1276
1288
1277
1289
1278 def _parsedrev(symbol):
1290 def _parsedrev(symbol):
1279 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1291 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1280 if symbol.startswith(b'D') and symbol[1:].isdigit():
1292 if symbol.startswith(b'D') and symbol[1:].isdigit():
1281 return int(symbol[1:])
1293 return int(symbol[1:])
1282 if symbol.isdigit():
1294 if symbol.isdigit():
1283 return int(symbol)
1295 return int(symbol)
1284
1296
1285
1297
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestors = set()
    op = tree[0]
    if op == b'symbol':
        # a leaf: only numeric symbols (D123 / 123) name a drev
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # ':X' — every drev mentioned below is also an ancestor root
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        drevs |= subdrevs
        ancestors |= subdrevs
        ancestors |= subancestors
    else:
        # operators (and_/add/sub/group): merge results of all operands
        for subtree in tree[1:]:
            subdrevs, subancestors = _prefetchdrevs(subtree)
            drevs |= subdrevs
            ancestors |= subancestors
    return drevs, ancestors
1306
1318
1307
1319
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # memoized differential.query call; params carry either b'ids' or
        # b'phids', and the first entry is used as the cache key
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # walk "phabricator:depends-on" links from each top drev, collecting
        # ids; the reversed result is ordered bottom (oldest) first
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor root,
    # speculatively pull the preceding `batchsize` ids in one call so the
    # per-drev getstack walk mostly hits the cache
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        # recursively evaluate the parsed spec into a smartset of drev ids
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status filter: restricted to prefetched ids (validids),
                # which is why it cannot be used alone
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # and_/add/sub are operator-module functions (&, +, -)
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1430
1442
1431
1443
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # drop empty sections before joining, like filter(None, ...)
    return b'\n\n'.join(p for p in parts if p)
1445
1457
1446
1458
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        localcommits = props.get(b'local:commits')
        if localcommits:
            # pick the first commit in sorted order (typically there is
            # exactly one entry) and convert its fields to hg:meta keys
            commit = sorted(localcommits.values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # no zone offset available here, hence the trailing "0"
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fill remaining fields from top-level diff attributes.  When "hg:meta"
    # was present, these writes land in the dict stored inside the diff's
    # properties (no copy is taken), matching the original behaviour.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1513
1525
1514
1526
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    ui = repo.ui
    # Prefetch hg:meta property for all diffs in one conduit round-trip;
    # only the latest (max id) diff of each drev is of interest
    diffids = sorted(
        set(max(int(v) for v in drev[b'diffs']) for drev in drevs)
    )
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        headerlines = [b'# HG changeset patch']
        for key in _metanamemap:
            if key in meta:
                headerlines.append(
                    b'# %s %s' % (_metanamemap[key], meta[key])
                )
        header = b'\n'.join(headerlines) + b'\n'

        write(b'%s%s\n%s' % (header, desc, body))
1546
1558
1547
1559
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ':' (ancestors) prefix of the query language
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1578
1590
1579
1591
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # At most one state-changing flag may be given; they are mutually
    # exclusive transitions on Phabricator's side.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    # Translate each selected flag into a Conduit transaction.
    actions = [{b'type': f, b'value': b'true'} for f in flags]

    drevs = querydrev(repo, spec)
    lastrevindex = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # The comment (if any) is attached only to the last revision of the
        # selection, mirroring how a reviewer would comment on a stack.
        if i == lastrevindex and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            callconduit(
                ui,
                b'differential.revision.edit',
                {
                    b'objectIdentifier': drev[b'phid'],
                    b'transactions': actions,
                },
            )
1616
1628
1617
1629
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the Differential URL recorded in the commit description by
    # phabsend; it carries both the full URL and the D-number.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
        )
    # Otherwise fall back to a local D-number tag on this node, building the
    # URL from the configured Phabricator base URL.
    for t in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(t):
            continue
        baseurl = ctx.repo().ui.config(b'phabricator', b'url')
        if not baseurl.endswith(b'/'):
            baseurl += b'/'
        return templateutil.hybriddict({b'url': baseurl + t, b'id': t,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now