##// END OF EJS Templates
index: use `index.has_node` in `phabricator.getoldnodedrevmap`...
marmoute -
r43949:4cb3f5bb default
parent child Browse files
Show More
@@ -1,1650 +1,1650 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
87 # leave the attribute unspecified.
87 # leave the attribute unspecified.
88 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
89
89
90 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
91
91
92 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
93 command = eh.command
93 command = eh.command
94 configtable = eh.configtable
94 configtable = eh.configtable
95 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
96
96
97 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
98 eh.configitem(
98 eh.configitem(
99 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
100 )
100 )
101 eh.configitem(
101 eh.configitem(
102 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
103 )
103 )
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
106 )
106 )
107 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
116 )
116 )
117
117
118 colortable = {
118 colortable = {
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
122 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
123 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
124 b'phabricator.node': b'',
124 b'phabricator.node': b'',
125 }
125 }
126
126
127 _VCR_FLAGS = [
127 _VCR_FLAGS = [
128 (
128 (
129 b'',
129 b'',
130 b'test-vcr',
130 b'test-vcr',
131 b'',
131 b'',
132 _(
132 _(
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
135 b' (ADVANCED)'
135 b' (ADVANCED)'
136 ),
136 ),
137 ),
137 ),
138 ]
138 ]
139
139
140
140
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
141 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
142 fullflags = flags + _VCR_FLAGS
142 fullflags = flags + _VCR_FLAGS
143
143
144 def hgmatcher(r1, r2):
144 def hgmatcher(r1, r2):
145 if r1.uri != r2.uri or r1.method != r2.method:
145 if r1.uri != r2.uri or r1.method != r2.method:
146 return False
146 return False
147 r1params = util.urlreq.parseqs(r1.body)
147 r1params = util.urlreq.parseqs(r1.body)
148 r2params = util.urlreq.parseqs(r2.body)
148 r2params = util.urlreq.parseqs(r2.body)
149 for key in r1params:
149 for key in r1params:
150 if key not in r2params:
150 if key not in r2params:
151 return False
151 return False
152 value = r1params[key][0]
152 value = r1params[key][0]
153 # we want to compare json payloads without worrying about ordering
153 # we want to compare json payloads without worrying about ordering
154 if value.startswith(b'{') and value.endswith(b'}'):
154 if value.startswith(b'{') and value.endswith(b'}'):
155 r1json = pycompat.json_loads(value)
155 r1json = pycompat.json_loads(value)
156 r2json = pycompat.json_loads(r2params[key][0])
156 r2json = pycompat.json_loads(r2params[key][0])
157 if r1json != r2json:
157 if r1json != r2json:
158 return False
158 return False
159 elif r2params[key][0] != value:
159 elif r2params[key][0] != value:
160 return False
160 return False
161 return True
161 return True
162
162
163 def sanitiserequest(request):
163 def sanitiserequest(request):
164 request.body = re.sub(
164 request.body = re.sub(
165 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
165 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
166 )
166 )
167 return request
167 return request
168
168
169 def sanitiseresponse(response):
169 def sanitiseresponse(response):
170 if 'set-cookie' in response['headers']:
170 if 'set-cookie' in response['headers']:
171 del response['headers']['set-cookie']
171 del response['headers']['set-cookie']
172 return response
172 return response
173
173
174 def decorate(fn):
174 def decorate(fn):
175 def inner(*args, **kwargs):
175 def inner(*args, **kwargs):
176 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
176 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
177 if cassette:
177 if cassette:
178 import hgdemandimport
178 import hgdemandimport
179
179
180 with hgdemandimport.deactivated():
180 with hgdemandimport.deactivated():
181 import vcr as vcrmod
181 import vcr as vcrmod
182 import vcr.stubs as stubs
182 import vcr.stubs as stubs
183
183
184 vcr = vcrmod.VCR(
184 vcr = vcrmod.VCR(
185 serializer='json',
185 serializer='json',
186 before_record_request=sanitiserequest,
186 before_record_request=sanitiserequest,
187 before_record_response=sanitiseresponse,
187 before_record_response=sanitiseresponse,
188 custom_patches=[
188 custom_patches=[
189 (
189 (
190 urlmod,
190 urlmod,
191 'httpconnection',
191 'httpconnection',
192 stubs.VCRHTTPConnection,
192 stubs.VCRHTTPConnection,
193 ),
193 ),
194 (
194 (
195 urlmod,
195 urlmod,
196 'httpsconnection',
196 'httpsconnection',
197 stubs.VCRHTTPSConnection,
197 stubs.VCRHTTPSConnection,
198 ),
198 ),
199 ],
199 ],
200 )
200 )
201 vcr.register_matcher('hgmatcher', hgmatcher)
201 vcr.register_matcher('hgmatcher', hgmatcher)
202 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
202 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
203 return fn(*args, **kwargs)
203 return fn(*args, **kwargs)
204 return fn(*args, **kwargs)
204 return fn(*args, **kwargs)
205
205
206 inner.__name__ = fn.__name__
206 inner.__name__ = fn.__name__
207 inner.__doc__ = fn.__doc__
207 inner.__doc__ = fn.__doc__
208 return command(
208 return command(
209 name,
209 name,
210 fullflags,
210 fullflags,
211 spec,
211 spec,
212 helpcategory=helpcategory,
212 helpcategory=helpcategory,
213 optionalrepo=optionalrepo,
213 optionalrepo=optionalrepo,
214 )(inner)
214 )(inner)
215
215
216 return decorate
216 return decorate
217
217
218
218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # PHP's form encoding spells booleans as the strings true/false.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact type checks on purpose: subclasses (e.g. sortdict) are
        # treated as scalar leaves, matching the original dispatch table.
        if type(value) is list:
            children = [(b'%d' % i, item) for i, item in enumerate(value)]
        elif type(value) is dict:
            children = list(value.items())
        else:
            flat[prefix] = value
            return
        for key, item in children:
            _flatten(b'%s[%s]' % (prefix, key) if prefix else key, item)

    _flatten(b'', params)
    return util.urlreq.urlencode(flat)
244
244
245
245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    authmatch = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if authmatch:
        groupname, authconfig = authmatch
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authconfig.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274
274
275
275
276 def callconduit(ui, name, params):
276 def callconduit(ui, name, params):
277 """call Conduit API, params is a dict. return json.loads result, or None"""
277 """call Conduit API, params is a dict. return json.loads result, or None"""
278 host, token = readurltoken(ui)
278 host, token = readurltoken(ui)
279 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
279 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
280 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
280 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
281 params = params.copy()
281 params = params.copy()
282 params[b'__conduit__'] = {
282 params[b'__conduit__'] = {
283 b'token': token,
283 b'token': token,
284 }
284 }
285 rawdata = {
285 rawdata = {
286 b'params': templatefilters.json(params),
286 b'params': templatefilters.json(params),
287 b'output': b'json',
287 b'output': b'json',
288 b'__conduit__': 1,
288 b'__conduit__': 1,
289 }
289 }
290 data = urlencodenested(rawdata)
290 data = urlencodenested(rawdata)
291 curlcmd = ui.config(b'phabricator', b'curlcmd')
291 curlcmd = ui.config(b'phabricator', b'curlcmd')
292 if curlcmd:
292 if curlcmd:
293 sin, sout = procutil.popen2(
293 sin, sout = procutil.popen2(
294 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
294 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
295 )
295 )
296 sin.write(data)
296 sin.write(data)
297 sin.close()
297 sin.close()
298 body = sout.read()
298 body = sout.read()
299 else:
299 else:
300 urlopener = urlmod.opener(ui, authinfo)
300 urlopener = urlmod.opener(ui, authinfo)
301 request = util.urlreq.request(pycompat.strurl(url), data=data)
301 request = util.urlreq.request(pycompat.strurl(url), data=data)
302 with contextlib.closing(urlopener.open(request)) as rsp:
302 with contextlib.closing(urlopener.open(request)) as rsp:
303 body = rsp.read()
303 body = rsp.read()
304 ui.debug(b'Conduit Response: %s\n' % body)
304 ui.debug(b'Conduit Response: %s\n' % body)
305 parsed = pycompat.rapply(
305 parsed = pycompat.rapply(
306 lambda x: encoding.unitolocal(x)
306 lambda x: encoding.unitolocal(x)
307 if isinstance(x, pycompat.unicode)
307 if isinstance(x, pycompat.unicode)
308 else x,
308 else x,
309 # json.loads only accepts bytes from py3.6+
309 # json.loads only accepts bytes from py3.6+
310 pycompat.json_loads(encoding.unifromlocal(body)),
310 pycompat.json_loads(encoding.unifromlocal(body)),
311 )
311 )
312 if parsed.get(b'error_code'):
312 if parsed.get(b'error_code'):
313 msg = _(b'Conduit Error (%s): %s') % (
313 msg = _(b'Conduit Error (%s): %s') % (
314 parsed[b'error_code'],
314 parsed[b'error_code'],
315 parsed[b'error_info'],
315 parsed[b'error_info'],
316 )
316 )
317 raise error.Abort(msg)
317 raise error.Abort(msg)
318 return parsed[b'result']
318 return parsed[b'result']
319
319
320
320
321 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
321 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
322 def debugcallconduit(ui, repo, name):
322 def debugcallconduit(ui, repo, name):
323 """call Conduit API
323 """call Conduit API
324
324
325 Call parameters are read from stdin as a JSON blob. Result will be written
325 Call parameters are read from stdin as a JSON blob. Result will be written
326 to stdout as a JSON blob.
326 to stdout as a JSON blob.
327 """
327 """
328 # json.loads only accepts bytes from 3.6+
328 # json.loads only accepts bytes from 3.6+
329 rawparams = encoding.unifromlocal(ui.fin.read())
329 rawparams = encoding.unifromlocal(ui.fin.read())
330 # json.loads only returns unicode strings
330 # json.loads only returns unicode strings
331 params = pycompat.rapply(
331 params = pycompat.rapply(
332 lambda x: encoding.unitolocal(x)
332 lambda x: encoding.unitolocal(x)
333 if isinstance(x, pycompat.unicode)
333 if isinstance(x, pycompat.unicode)
334 else x,
334 else x,
335 pycompat.json_loads(rawparams),
335 pycompat.json_loads(rawparams),
336 )
336 )
337 # json.dumps only accepts unicode strings
337 # json.dumps only accepts unicode strings
338 result = pycompat.rapply(
338 result = pycompat.rapply(
339 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
339 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
340 callconduit(ui, name, params),
340 callconduit(ui, name, params),
341 )
341 )
342 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
342 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
343 ui.write(b'%s\n' % encoding.unitolocal(s))
343 ui.write(b'%s\n' % encoding.unitolocal(s))
344
344
345
345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    phid = ui.config(b'phabricator', b'repophid')
    if phid:
        return phid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    response = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = response[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Cache the answer in config so later calls skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
365
365
366
366
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
368 _differentialrevisiondescre = re.compile(
368 _differentialrevisiondescre = re.compile(
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
370 )
370 )
371
371
372
372
373 def getoldnodedrevmap(repo, nodelist):
373 def getoldnodedrevmap(repo, nodelist):
374 """find previous nodes that has been sent to Phabricator
374 """find previous nodes that has been sent to Phabricator
375
375
376 return {node: (oldnode, Differential diff, Differential Revision ID)}
376 return {node: (oldnode, Differential diff, Differential Revision ID)}
377 for node in nodelist with known previous sent versions, or associated
377 for node in nodelist with known previous sent versions, or associated
378 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
378 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
379 be ``None``.
379 be ``None``.
380
380
381 Examines commit messages like "Differential Revision:" to get the
381 Examines commit messages like "Differential Revision:" to get the
382 association information.
382 association information.
383
383
384 If such commit message line is not found, examines all precursors and their
384 If such commit message line is not found, examines all precursors and their
385 tags. Tags with format like "D1234" are considered a match and the node
385 tags. Tags with format like "D1234" are considered a match and the node
386 with that tag, and the number after "D" (ex. 1234) will be returned.
386 with that tag, and the number after "D" (ex. 1234) will be returned.
387
387
388 The ``old node``, if not None, is guaranteed to be the last diff of
388 The ``old node``, if not None, is guaranteed to be the last diff of
389 corresponding Differential Revision, and exist in the repo.
389 corresponding Differential Revision, and exist in the repo.
390 """
390 """
391 unfi = repo.unfiltered()
391 unfi = repo.unfiltered()
392 nodemap = unfi.changelog.nodemap
392 has_node = unfi.changelog.index.has_node
393
393
394 result = {} # {node: (oldnode?, lastdiff?, drev)}
394 result = {} # {node: (oldnode?, lastdiff?, drev)}
395 toconfirm = {} # {node: (force, {precnode}, drev)}
395 toconfirm = {} # {node: (force, {precnode}, drev)}
396 for node in nodelist:
396 for node in nodelist:
397 ctx = unfi[node]
397 ctx = unfi[node]
398 # For tags like "D123", put them into "toconfirm" to verify later
398 # For tags like "D123", put them into "toconfirm" to verify later
399 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
399 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
400 for n in precnodes:
400 for n in precnodes:
401 if n in nodemap:
401 if has_node(n):
402 for tag in unfi.nodetags(n):
402 for tag in unfi.nodetags(n):
403 m = _differentialrevisiontagre.match(tag)
403 m = _differentialrevisiontagre.match(tag)
404 if m:
404 if m:
405 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
405 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
406 continue
406 continue
407
407
408 # Check commit message
408 # Check commit message
409 m = _differentialrevisiondescre.search(ctx.description())
409 m = _differentialrevisiondescre.search(ctx.description())
410 if m:
410 if m:
411 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
411 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
412
412
413 # Double check if tags are genuine by collecting all old nodes from
413 # Double check if tags are genuine by collecting all old nodes from
414 # Phabricator, and expect precursors overlap with it.
414 # Phabricator, and expect precursors overlap with it.
415 if toconfirm:
415 if toconfirm:
416 drevs = [drev for force, precs, drev in toconfirm.values()]
416 drevs = [drev for force, precs, drev in toconfirm.values()]
417 alldiffs = callconduit(
417 alldiffs = callconduit(
418 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
418 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
419 )
419 )
420 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
420 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
421 for newnode, (force, precset, drev) in toconfirm.items():
421 for newnode, (force, precset, drev) in toconfirm.items():
422 diffs = [
422 diffs = [
423 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
423 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
424 ]
424 ]
425
425
426 # "precursors" as known by Phabricator
426 # "precursors" as known by Phabricator
427 phprecset = set(getnode(d) for d in diffs)
427 phprecset = set(getnode(d) for d in diffs)
428
428
429 # Ignore if precursors (Phabricator and local repo) do not overlap,
429 # Ignore if precursors (Phabricator and local repo) do not overlap,
430 # and force is not set (when commit message says nothing)
430 # and force is not set (when commit message says nothing)
431 if not force and not bool(phprecset & precset):
431 if not force and not bool(phprecset & precset):
432 tagname = b'D%d' % drev
432 tagname = b'D%d' % drev
433 tags.tag(
433 tags.tag(
434 repo,
434 repo,
435 tagname,
435 tagname,
436 nullid,
436 nullid,
437 message=None,
437 message=None,
438 user=None,
438 user=None,
439 date=None,
439 date=None,
440 local=True,
440 local=True,
441 )
441 )
442 unfi.ui.warn(
442 unfi.ui.warn(
443 _(
443 _(
444 b'D%d: local tag removed - does not match '
444 b'D%d: local tag removed - does not match '
445 b'Differential history\n'
445 b'Differential history\n'
446 )
446 )
447 % drev
447 % drev
448 )
448 )
449 continue
449 continue
450
450
451 # Find the last node using Phabricator metadata, and make sure it
451 # Find the last node using Phabricator metadata, and make sure it
452 # exists in the repo
452 # exists in the repo
453 oldnode = lastdiff = None
453 oldnode = lastdiff = None
454 if diffs:
454 if diffs:
455 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
455 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
456 oldnode = getnode(lastdiff)
456 oldnode = getnode(lastdiff)
457 if oldnode and oldnode not in nodemap:
457 if oldnode and not has_node(oldnode):
458 oldnode = None
458 oldnode = None
459
459
460 result[newnode] = (oldnode, lastdiff, drev)
460 result[newnode] = (oldnode, lastdiff, drev)
461
461
462 return result
462 return result
463
463
464
464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    pieces = [
        chunk
        for chunk, _label in patch.diffui(
            ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
        )
    ]
    return b''.join(pieces)
473
473
474
474
475 class DiffChangeType(object):
475 class DiffChangeType(object):
476 ADD = 1
476 ADD = 1
477 CHANGE = 2
477 CHANGE = 2
478 DELETE = 3
478 DELETE = 3
479 MOVE_AWAY = 4
479 MOVE_AWAY = 4
480 COPY_AWAY = 5
480 COPY_AWAY = 5
481 MOVE_HERE = 6
481 MOVE_HERE = 6
482 COPY_HERE = 7
482 COPY_HERE = 7
483 MULTICOPY = 8
483 MULTICOPY = 8
484
484
485
485
486 class DiffFileType(object):
486 class DiffFileType(object):
487 TEXT = 1
487 TEXT = 1
488 IMAGE = 2
488 IMAGE = 2
489 BINARY = 3
489 BINARY = 3
490
490
491
491
492 @attr.s
492 @attr.s
493 class phabhunk(dict):
493 class phabhunk(dict):
494 """Represents a Differential hunk, which is owned by a Differential change
494 """Represents a Differential hunk, which is owned by a Differential change
495 """
495 """
496
496
497 oldOffset = attr.ib(default=0) # camelcase-required
497 oldOffset = attr.ib(default=0) # camelcase-required
498 oldLength = attr.ib(default=0) # camelcase-required
498 oldLength = attr.ib(default=0) # camelcase-required
499 newOffset = attr.ib(default=0) # camelcase-required
499 newOffset = attr.ib(default=0) # camelcase-required
500 newLength = attr.ib(default=0) # camelcase-required
500 newLength = attr.ib(default=0) # camelcase-required
501 corpus = attr.ib(default='')
501 corpus = attr.ib(default='')
502 # These get added to the phabchange's equivalents
502 # These get added to the phabchange's equivalents
503 addLines = attr.ib(default=0) # camelcase-required
503 addLines = attr.ib(default=0) # camelcase-required
504 delLines = attr.ib(default=0) # camelcase-required
504 delLines = attr.ib(default=0) # camelcase-required
505
505
506
506
507 @attr.s
507 @attr.s
508 class phabchange(object):
508 class phabchange(object):
509 """Represents a Differential change, owns Differential hunks and owned by a
509 """Represents a Differential change, owns Differential hunks and owned by a
510 Differential diff. Each one represents one file in a diff.
510 Differential diff. Each one represents one file in a diff.
511 """
511 """
512
512
513 currentPath = attr.ib(default=None) # camelcase-required
513 currentPath = attr.ib(default=None) # camelcase-required
514 oldPath = attr.ib(default=None) # camelcase-required
514 oldPath = attr.ib(default=None) # camelcase-required
515 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
515 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
516 metadata = attr.ib(default=attr.Factory(dict))
516 metadata = attr.ib(default=attr.Factory(dict))
517 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
517 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
518 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
518 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
519 type = attr.ib(default=DiffChangeType.CHANGE)
519 type = attr.ib(default=DiffChangeType.CHANGE)
520 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
520 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
521 commitHash = attr.ib(default=None) # camelcase-required
521 commitHash = attr.ib(default=None) # camelcase-required
522 addLines = attr.ib(default=0) # camelcase-required
522 addLines = attr.ib(default=0) # camelcase-required
523 delLines = attr.ib(default=0) # camelcase-required
523 delLines = attr.ib(default=0) # camelcase-required
524 hunks = attr.ib(default=attr.Factory(list))
524 hunks = attr.ib(default=attr.Factory(list))
525
525
526 def copynewmetadatatoold(self):
526 def copynewmetadatatoold(self):
527 for key in list(self.metadata.keys()):
527 for key in list(self.metadata.keys()):
528 newkey = key.replace(b'new:', b'old:')
528 newkey = key.replace(b'new:', b'old:')
529 self.metadata[newkey] = self.metadata[key]
529 self.metadata[newkey] = self.metadata[key]
530
530
531 def addoldmode(self, value):
531 def addoldmode(self, value):
532 self.oldProperties[b'unix:filemode'] = value
532 self.oldProperties[b'unix:filemode'] = value
533
533
534 def addnewmode(self, value):
534 def addnewmode(self, value):
535 self.newProperties[b'unix:filemode'] = value
535 self.newProperties[b'unix:filemode'] = value
536
536
537 def addhunk(self, hunk):
537 def addhunk(self, hunk):
538 if not isinstance(hunk, phabhunk):
538 if not isinstance(hunk, phabhunk):
539 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
539 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
540 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
540 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
541 # It's useful to include these stats since the Phab web UI shows them,
541 # It's useful to include these stats since the Phab web UI shows them,
542 # and uses them to estimate how large a change a Revision is. Also used
542 # and uses them to estimate how large a change a Revision is. Also used
543 # in email subjects for the [+++--] bit.
543 # in email subjects for the [+++--] bit.
544 self.addLines += hunk.addLines
544 self.addLines += hunk.addLines
545 self.delLines += hunk.delLines
545 self.delLines += hunk.delLines
546
546
547
547
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    # 40 zeroes is the null revision; overridden with the real parent hash
    # by creatediff()
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: serialized phabchange dict}, populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        # Only phabchange instances can be serialized into the diff payload
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
574
574
575
575
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # A huge context makes each hunk carry essentially the whole file
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for ranges, lines in fhunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # The first line is the "@@ ..." marker; the hunk body follows it
        corpus = b''.join(lines[1:])
        # Derive per-hunk diffstat numbers from the header plus hunk body
        statlines = list(header)
        statlines.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        hunk = phabhunk(
            oldOffset,
            oldLength,
            newOffset,
            newLength,
            corpus,
            addLines,
            delLines,
        )
        pchange.addhunk(hunk)
604
604
605
605
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # The server already holds this byte range
                continue
            start = int(chunk[b'byteStart'])
            end = int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': start,
                b'data': base64.b64encode(data[start:end]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
631
631
632
632
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID was pre-allocated: the file is large enough that the
            # server wants it delivered chunk by chunk
            uploadchunks(fctx, fphid)
        else:
            # Small file: a single file.upload call returns the PHID
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
668
668
669
669
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents are identical: reuse the metadata already collected for
        # the new side.  If it's left as IMAGE/BINARY the web UI might try
        # to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
691
691
692
692
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    if mimeguess.startswith(b'image/'):
        # Let the Phabricator web UI render an image preview
        pchange.fileType = DiffFileType.IMAGE
705
705
706
706
# Copied from mercurial/patch.py
# Maps a filectx flag to the git file-mode string used in diff metadata.
gitmode = {
    b'l': b'120000',  # symlink
    b'x': b'100755',  # executable
    b'': b'100644',  # regular file
}
709
709
710
710
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        if fctx.parents():
            # The first-parent version of the file is decoded as well
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
726
726
727
727
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves

    Each name in ``removed`` becomes a DELETE phabchange carrying the
    parent revision's file mode and, for text files, the deletion hunk.
    """
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look the file up in the parent revision once; the previous code
        # performed this lookup twice per file.
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            # Binary/non-UTF-8 deletions are recorded without hunks
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
740
740
741
741
def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            # Record the permission change (e.g. +x) on both sides
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
760
760
761
761
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        # Truthy when this file was copied or moved from another path
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source path vanished in this commit, so this is a
                # move: emit a MOVE_AWAY change for the source and take it
                # out of `removed` so addremoved() won't also delete it.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A second destination for an already-moved source:
                # upgrade the source change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Source-side changes are emitted after all destinations so their final
    # type (MOVE_AWAY vs MULTICOPY) and awayPaths lists are settled.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
825
825
826
826
def creatediff(ctx):
    """create a Differential Diff via the differential.creatediff API"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    params = pycompat.byteskwargs(attr.asdict(pdiff))
    diff = callconduit(repo.ui, b'differential.creatediff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
852
852
853
853
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    ui = ctx.repo().ui

    # Raw changeset metadata, enough to reconstruct the commit exactly
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    # Same information in the shape arcanist records for local commits
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    # Both payloads are attached to the diff as named properties
    for name, payload in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        params = {
            b'diff_id': diffid,
            b'name': name,
            b'data': templatefilters.json(payload),
        }
        callconduit(ui, b'differential.setdiffproperty', params)
890
890
891
891
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of the old and new nodes to decide
        # whether a fresh Differential diff needs to be uploaded.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        # Only these fields are forwarded as edit transactions
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
961
961
962
962
def userphids(repo, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': names}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
978
978
979
979
980 @vcrcommand(
980 @vcrcommand(
981 b'phabsend',
981 b'phabsend',
982 [
982 [
983 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
983 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
984 (b'', b'amend', True, _(b'update commit messages')),
984 (b'', b'amend', True, _(b'update commit messages')),
985 (b'', b'reviewer', [], _(b'specify reviewers')),
985 (b'', b'reviewer', [], _(b'specify reviewers')),
986 (b'', b'blocker', [], _(b'specify blocking reviewers')),
986 (b'', b'blocker', [], _(b'specify blocking reviewers')),
987 (
987 (
988 b'm',
988 b'm',
989 b'comment',
989 b'comment',
990 b'',
990 b'',
991 _(b'add a comment to Revisions with new/updated Diffs'),
991 _(b'add a comment to Revisions with new/updated Diffs'),
992 ),
992 ),
993 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
993 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
994 ],
994 ],
995 _(b'REV [OPTIONS]'),
995 _(b'REV [OPTIONS]'),
996 helpcategory=command.CATEGORY_IMPORT_EXPORT,
996 helpcategory=command.CATEGORY_IMPORT_EXPORT,
997 )
997 )
998 def phabsend(ui, repo, *revs, **opts):
998 def phabsend(ui, repo, *revs, **opts):
999 """upload changesets to Phabricator
999 """upload changesets to Phabricator
1000
1000
1001 If there are multiple revisions specified, they will be send as a stack
1001 If there are multiple revisions specified, they will be send as a stack
1002 with a linear dependencies relationship using the order specified by the
1002 with a linear dependencies relationship using the order specified by the
1003 revset.
1003 revset.
1004
1004
1005 For the first time uploading changesets, local tags will be created to
1005 For the first time uploading changesets, local tags will be created to
1006 maintain the association. After the first time, phabsend will check
1006 maintain the association. After the first time, phabsend will check
1007 obsstore and tags information so it can figure out whether to update an
1007 obsstore and tags information so it can figure out whether to update an
1008 existing Differential Revision, or create a new one.
1008 existing Differential Revision, or create a new one.
1009
1009
1010 If --amend is set, update commit messages so they have the
1010 If --amend is set, update commit messages so they have the
1011 ``Differential Revision`` URL, remove related tags. This is similar to what
1011 ``Differential Revision`` URL, remove related tags. This is similar to what
1012 arcanist will do, and is more desired in author-push workflows. Otherwise,
1012 arcanist will do, and is more desired in author-push workflows. Otherwise,
1013 use local tags to record the ``Differential Revision`` association.
1013 use local tags to record the ``Differential Revision`` association.
1014
1014
1015 The --confirm option lets you confirm changesets before sending them. You
1015 The --confirm option lets you confirm changesets before sending them. You
1016 can also add following to your configuration file to make it default
1016 can also add following to your configuration file to make it default
1017 behaviour::
1017 behaviour::
1018
1018
1019 [phabsend]
1019 [phabsend]
1020 confirm = true
1020 confirm = true
1021
1021
1022 phabsend will check obsstore and the above association to decide whether to
1022 phabsend will check obsstore and the above association to decide whether to
1023 update an existing Differential Revision, or create a new one.
1023 update an existing Differential Revision, or create a new one.
1024 """
1024 """
1025 opts = pycompat.byteskwargs(opts)
1025 opts = pycompat.byteskwargs(opts)
1026 revs = list(revs) + opts.get(b'rev', [])
1026 revs = list(revs) + opts.get(b'rev', [])
1027 revs = scmutil.revrange(repo, revs)
1027 revs = scmutil.revrange(repo, revs)
1028
1028
1029 if not revs:
1029 if not revs:
1030 raise error.Abort(_(b'phabsend requires at least one changeset'))
1030 raise error.Abort(_(b'phabsend requires at least one changeset'))
1031 if opts.get(b'amend'):
1031 if opts.get(b'amend'):
1032 cmdutil.checkunfinished(repo)
1032 cmdutil.checkunfinished(repo)
1033
1033
1034 # {newnode: (oldnode, olddiff, olddrev}
1034 # {newnode: (oldnode, olddiff, olddrev}
1035 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1035 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1036
1036
1037 confirm = ui.configbool(b'phabsend', b'confirm')
1037 confirm = ui.configbool(b'phabsend', b'confirm')
1038 confirm |= bool(opts.get(b'confirm'))
1038 confirm |= bool(opts.get(b'confirm'))
1039 if confirm:
1039 if confirm:
1040 confirmed = _confirmbeforesend(repo, revs, oldmap)
1040 confirmed = _confirmbeforesend(repo, revs, oldmap)
1041 if not confirmed:
1041 if not confirmed:
1042 raise error.Abort(_(b'phabsend cancelled'))
1042 raise error.Abort(_(b'phabsend cancelled'))
1043
1043
1044 actions = []
1044 actions = []
1045 reviewers = opts.get(b'reviewer', [])
1045 reviewers = opts.get(b'reviewer', [])
1046 blockers = opts.get(b'blocker', [])
1046 blockers = opts.get(b'blocker', [])
1047 phids = []
1047 phids = []
1048 if reviewers:
1048 if reviewers:
1049 phids.extend(userphids(repo, reviewers))
1049 phids.extend(userphids(repo, reviewers))
1050 if blockers:
1050 if blockers:
1051 phids.extend(
1051 phids.extend(
1052 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1052 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1053 )
1053 )
1054 if phids:
1054 if phids:
1055 actions.append({b'type': b'reviewers.add', b'value': phids})
1055 actions.append({b'type': b'reviewers.add', b'value': phids})
1056
1056
1057 drevids = [] # [int]
1057 drevids = [] # [int]
1058 diffmap = {} # {newnode: diff}
1058 diffmap = {} # {newnode: diff}
1059
1059
1060 # Send patches one by one so we know their Differential Revision PHIDs and
1060 # Send patches one by one so we know their Differential Revision PHIDs and
1061 # can provide dependency relationship
1061 # can provide dependency relationship
1062 lastrevphid = None
1062 lastrevphid = None
1063 for rev in revs:
1063 for rev in revs:
1064 ui.debug(b'sending rev %d\n' % rev)
1064 ui.debug(b'sending rev %d\n' % rev)
1065 ctx = repo[rev]
1065 ctx = repo[rev]
1066
1066
1067 # Get Differential Revision ID
1067 # Get Differential Revision ID
1068 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1068 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1069 if oldnode != ctx.node() or opts.get(b'amend'):
1069 if oldnode != ctx.node() or opts.get(b'amend'):
1070 # Create or update Differential Revision
1070 # Create or update Differential Revision
1071 revision, diff = createdifferentialrevision(
1071 revision, diff = createdifferentialrevision(
1072 ctx,
1072 ctx,
1073 revid,
1073 revid,
1074 lastrevphid,
1074 lastrevphid,
1075 oldnode,
1075 oldnode,
1076 olddiff,
1076 olddiff,
1077 actions,
1077 actions,
1078 opts.get(b'comment'),
1078 opts.get(b'comment'),
1079 )
1079 )
1080 diffmap[ctx.node()] = diff
1080 diffmap[ctx.node()] = diff
1081 newrevid = int(revision[b'object'][b'id'])
1081 newrevid = int(revision[b'object'][b'id'])
1082 newrevphid = revision[b'object'][b'phid']
1082 newrevphid = revision[b'object'][b'phid']
1083 if revid:
1083 if revid:
1084 action = b'updated'
1084 action = b'updated'
1085 else:
1085 else:
1086 action = b'created'
1086 action = b'created'
1087
1087
1088 # Create a local tag to note the association, if commit message
1088 # Create a local tag to note the association, if commit message
1089 # does not have it already
1089 # does not have it already
1090 m = _differentialrevisiondescre.search(ctx.description())
1090 m = _differentialrevisiondescre.search(ctx.description())
1091 if not m or int(m.group('id')) != newrevid:
1091 if not m or int(m.group('id')) != newrevid:
1092 tagname = b'D%d' % newrevid
1092 tagname = b'D%d' % newrevid
1093 tags.tag(
1093 tags.tag(
1094 repo,
1094 repo,
1095 tagname,
1095 tagname,
1096 ctx.node(),
1096 ctx.node(),
1097 message=None,
1097 message=None,
1098 user=None,
1098 user=None,
1099 date=None,
1099 date=None,
1100 local=True,
1100 local=True,
1101 )
1101 )
1102 else:
1102 else:
1103 # Nothing changed. But still set "newrevphid" so the next revision
1103 # Nothing changed. But still set "newrevphid" so the next revision
1104 # could depend on this one and "newrevid" for the summary line.
1104 # could depend on this one and "newrevid" for the summary line.
1105 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1105 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1106 newrevid = revid
1106 newrevid = revid
1107 action = b'skipped'
1107 action = b'skipped'
1108
1108
1109 actiondesc = ui.label(
1109 actiondesc = ui.label(
1110 {
1110 {
1111 b'created': _(b'created'),
1111 b'created': _(b'created'),
1112 b'skipped': _(b'skipped'),
1112 b'skipped': _(b'skipped'),
1113 b'updated': _(b'updated'),
1113 b'updated': _(b'updated'),
1114 }[action],
1114 }[action],
1115 b'phabricator.action.%s' % action,
1115 b'phabricator.action.%s' % action,
1116 )
1116 )
1117 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1117 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1118 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1118 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1119 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1119 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1120 ui.write(
1120 ui.write(
1121 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1121 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1122 )
1122 )
1123 drevids.append(newrevid)
1123 drevids.append(newrevid)
1124 lastrevphid = newrevphid
1124 lastrevphid = newrevphid
1125
1125
1126 # Update commit messages and remove tags
1126 # Update commit messages and remove tags
1127 if opts.get(b'amend'):
1127 if opts.get(b'amend'):
1128 unfi = repo.unfiltered()
1128 unfi = repo.unfiltered()
1129 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1129 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1130 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1130 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1131 wnode = unfi[b'.'].node()
1131 wnode = unfi[b'.'].node()
1132 mapping = {} # {oldnode: [newnode]}
1132 mapping = {} # {oldnode: [newnode]}
1133 for i, rev in enumerate(revs):
1133 for i, rev in enumerate(revs):
1134 old = unfi[rev]
1134 old = unfi[rev]
1135 drevid = drevids[i]
1135 drevid = drevids[i]
1136 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1136 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1137 newdesc = getdescfromdrev(drev)
1137 newdesc = getdescfromdrev(drev)
1138 # Make sure commit message contain "Differential Revision"
1138 # Make sure commit message contain "Differential Revision"
1139 if old.description() != newdesc:
1139 if old.description() != newdesc:
1140 if old.phase() == phases.public:
1140 if old.phase() == phases.public:
1141 ui.warn(
1141 ui.warn(
1142 _(b"warning: not updating public commit %s\n")
1142 _(b"warning: not updating public commit %s\n")
1143 % scmutil.formatchangeid(old)
1143 % scmutil.formatchangeid(old)
1144 )
1144 )
1145 continue
1145 continue
1146 parents = [
1146 parents = [
1147 mapping.get(old.p1().node(), (old.p1(),))[0],
1147 mapping.get(old.p1().node(), (old.p1(),))[0],
1148 mapping.get(old.p2().node(), (old.p2(),))[0],
1148 mapping.get(old.p2().node(), (old.p2(),))[0],
1149 ]
1149 ]
1150 new = context.metadataonlyctx(
1150 new = context.metadataonlyctx(
1151 repo,
1151 repo,
1152 old,
1152 old,
1153 parents=parents,
1153 parents=parents,
1154 text=newdesc,
1154 text=newdesc,
1155 user=old.user(),
1155 user=old.user(),
1156 date=old.date(),
1156 date=old.date(),
1157 extra=old.extra(),
1157 extra=old.extra(),
1158 )
1158 )
1159
1159
1160 newnode = new.commit()
1160 newnode = new.commit()
1161
1161
1162 mapping[old.node()] = [newnode]
1162 mapping[old.node()] = [newnode]
1163 # Update diff property
1163 # Update diff property
1164 # If it fails just warn and keep going, otherwise the DREV
1164 # If it fails just warn and keep going, otherwise the DREV
1165 # associations will be lost
1165 # associations will be lost
1166 try:
1166 try:
1167 writediffproperties(unfi[newnode], diffmap[old.node()])
1167 writediffproperties(unfi[newnode], diffmap[old.node()])
1168 except util.urlerr.urlerror:
1168 except util.urlerr.urlerror:
1169 ui.warnnoi18n(
1169 ui.warnnoi18n(
1170 b'Failed to update metadata for D%d\n' % drevid
1170 b'Failed to update metadata for D%d\n' % drevid
1171 )
1171 )
1172 # Remove local tags since it's no longer necessary
1172 # Remove local tags since it's no longer necessary
1173 tagname = b'D%d' % drevid
1173 tagname = b'D%d' % drevid
1174 if tagname in repo.tags():
1174 if tagname in repo.tags():
1175 tags.tag(
1175 tags.tag(
1176 repo,
1176 repo,
1177 tagname,
1177 tagname,
1178 nullid,
1178 nullid,
1179 message=None,
1179 message=None,
1180 user=None,
1180 user=None,
1181 date=None,
1181 date=None,
1182 local=True,
1182 local=True,
1183 )
1183 )
1184 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1184 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1185 if wnode in mapping:
1185 if wnode in mapping:
1186 unfi.setparents(mapping[wnode][0])
1186 unfi.setparents(mapping[wnode][0])
1187
1187
1188
1188
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
# NOTE(review): the trailing space in b'Parent ' looks intentional —
# presumably to align with the two-space "# Parent  <node>" form that
# "hg export" emits; confirm before "fixing" it.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1200
1200
1201
1201
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and prompt for confirmation.

    ``oldmap`` maps a node to ``(oldnode, olddiff, drevid)`` as returned by
    ``getoldnodedrevmap``; a known ``drevid`` is shown as ``D<id>``, an
    unknown one as ``NEW``. Returns True when the user confirms sending.
    """
    ui = repo.ui
    url, token = readurltoken(ui)
    for rev in revs:
        ctx = repo[rev]
        summary = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        summarydesc = ui.label(summary, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, summarydesc))

    # promptchoice returns the index of the selected answer: 0 = Yes, 1 = No
    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1229
1229
1230
1230
1231 _knownstatusnames = {
1231 _knownstatusnames = {
1232 b'accepted',
1232 b'accepted',
1233 b'needsreview',
1233 b'needsreview',
1234 b'needsrevision',
1234 b'needsrevision',
1235 b'closed',
1235 b'closed',
1236 b'abandoned',
1236 b'abandoned',
1237 }
1237 }
1238
1238
1239
1239
1240 def _getstatusname(drev):
1240 def _getstatusname(drev):
1241 """get normalized status name from a Differential Revision"""
1241 """get normalized status name from a Differential Revision"""
1242 return drev[b'statusName'].replace(b' ', b'').lower()
1242 return drev[b'statusName'].replace(b' ', b'').lower()
1243
1243
1244
1244
1245 # Small language to specify differential revisions. Support symbols: (), :X,
1245 # Small language to specify differential revisions. Support symbols: (), :X,
1246 # +, and -.
1246 # +, and -.
1247
1247
# Grammar table consumed by mercurial's generic Pratt parser
# (parser.parser) in _parse below.
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1259
1259
1260
1260
def _tokenize(text):
    """Yield ``(token-type, token-value, position)`` tuples for a DREVSPEC.

    Symbols are maximal runs of bytes outside the special set; each special
    character is its own token; spaces are skipped. A final ``(b'end', ...)``
    token is always emitted.
    """
    special = b'():+-& '
    view = memoryview(text)  # slice without copying
    end = len(text)
    pos = 0
    while pos < end:
        # Consume the longest run of non-special bytes as one symbol.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:
            ch = text[pos : pos + 1]
            # A special character is its own token; spaces are dropped.
            if ch != b' ':
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
1280
1280
1281
1281
def _parse(text):
    """Parse a DREVSPEC into a tree; raise ParseError on trailing garbage."""
    p = parser.parser(_elements)
    tree, endpos = p.parse(_tokenize(text))
    if endpos != len(text):
        raise error.ParseError(b'invalid token', endpos)
    return tree
1287
1287
1288
1288
1289 def _parsedrev(symbol):
1289 def _parsedrev(symbol):
1290 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1290 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1291 if symbol.startswith(b'D') and symbol[1:].isdigit():
1291 if symbol.startswith(b'D') and symbol[1:].isdigit():
1292 return int(symbol[1:])
1292 return int(symbol[1:])
1293 if symbol.isdigit():
1293 if symbol.isdigit():
1294 return int(symbol)
1294 return int(symbol)
1295
1295
1296
1296
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Recursively walks a parsed DREVSPEC tree. Ids reached through an
    ``ancestors`` (``:``) node also land in the second set so callers can
    prefetch their dependency stacks.
    """
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # ``:X`` needs X itself plus everything below it in the stack.
        subdrevs, subancestors = _prefetchdrevs(tree[1])
        drevs.update(subdrevs)
        ancestordrevs.update(subdrevs)
        ancestordrevs.update(subancestors)
    else:
        # Operators (and_/add/sub/group): merge results from every operand.
        for subtree in tree[1:]:
            subdrevs, subancestors = _prefetchdrevs(subtree)
            drevs.update(subdrevs)
            ancestordrevs.update(subancestors)
    return drevs, ancestordrevs
1317
1317
1318
1318
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

    {
        "id": "2",
        "phid": "PHID-DREV-672qvysjcczopag46qty",
        "title": "example",
        "uri": "https://phab.example.com/D2",
        "dateCreated": "1499181406",
        "dateModified": "1499182103",
        "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
        "status": "0",
        "statusName": "Needs Review",
        "properties": [],
        "branch": null,
        "summary": "",
        "testPlan": "",
        "lineCount": "2",
        "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
        "diffs": [
          "3",
          "4",
        ],
        "commits": [],
        "reviewers": [],
        "ccs": [],
        "hashes": [],
        "auxiliary": {
          "phabricator:projects": [],
          "phabricator:depends-on": [
            "PHID-DREV-gbapp366kutjebt7agcd"
          ]
        },
        "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
        "sourcePath": null
    }
    """

    def fetch(params):
        """params -> single drev or None"""
        # The caller queries either by numeric id or by PHID; whichever
        # list is present supplies the cache key.
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result; each drev is indexed both by
        # PHID and by numeric id.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        # Depth-first walk along "phabricator:depends-on" edges, collecting
        # each drev once; the reversed visit order yields bottom-to-top.
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for every ancestor query
    # also speculatively fetch up to ``batchsize`` ids below it, so that
    # getstack can mostly be served from the cache instead of issuing one
    # conduit call per revision.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names filter only within already-selected ids
                # (validids), hence they cannot be used alone.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Operator names match the and_/add/sub functions in the
            # operator module, applied to the operand smartsets.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1441
1441
1442
1442
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        parts.append(b'Test Plan:\n%s' % testplan)
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a blank summary) are dropped entirely.
    return b'\n\n'.join(p for p in parts if p)
1456
1456
1457
1457
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

    "properties": {
      "hg:meta": {
        "date": "1499571514 25200",
        "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
        "user": "Foo Bar <foo@example.com>",
        "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
      }
    }

    Or converted from "local:commits", sent by "arc", like:

    "properties": {
      "local:commits": {
        "98c08acae292b2faf60a279b4189beb6cff1414d": {
          "author": "Foo Bar",
          "time": 1499546314,
          "branch": "default",
          "tag": "",
          "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
          "local": "1000",
          "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
          "summary": "...",
          "message": "...",
          "authorEmail": "foo@example.com"
        }
      }
    }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit deterministically. A plain sorted() over the
            # values raises TypeError on Python 3 whenever arc attached
            # more than one commit (dicts are unorderable), so order by
            # (time, node) and take the earliest instead.
            commit = min(
                props[b'local:commits'].values(),
                key=lambda c: (
                    c.get(b'time', 0),
                    c.get(b'commit', c.get(b'rev', b'')),
                ),
            )
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # arc stores a bare epoch; the zero offset loses the tz.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to fields of the diff object itself for anything the
    # properties did not provide.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1524
1524
1525
1525
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    ``write`` is usually ui.write. ``drevs`` is what "querydrev" returns,
    results of "differential.query".
    """
    ui = repo.ui

    def latestdiffid(drev):
        # a drev lists its diff ids as strings; the max is the latest one
        return max(int(v) for v in drev[b'diffs'])

    # Prefetch metadata of every drev's latest diff in one conduit call.
    diffids = sorted({latestdiffid(drev) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    # Generate one patch per drev.
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = latestdiffid(drev)
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)

        # Emit headers the "import" command understands, in _metanamemap
        # order. See patchheadermap and extract in mercurial/patch.py for
        # the supported headers.
        lines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                lines.append(b'# %s %s\n' % (_metanamemap[k], meta[k]))
        header = b''.join(lines)

        write(b'%s%s\n%s' % (header, desc, body))
1557
1557
1558
1558
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    # --stack is pure sugar: it wraps the spec in the ':' (ancestors)
    # operator before querying.
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    readpatch(repo, querydrev(repo, spec), ui.write)
1589
1589
1590
1590
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # At most one status-changing flag may be given at a time.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = [{b'type': f, b'value': True} for f in flags]

    drevs = querydrev(repo, spec)
    lastidx = len(drevs) - 1
    for i, drev in enumerate(drevs):
        # The optional comment is attached only to the last revision of the
        # selected set (appended once; later iterations do not exist).
        if i == lastidx and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if not actions:
            continue
        callconduit(
            ui,
            b'differential.revision.edit',
            {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            },
        )
1627
1627
1628
1628
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Preferred source: the Differential Revision URL embedded in the
    # changeset description.
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
        )
    # Fallback: look for a local tag matching the Differential tag pattern
    # and resolve it against the configured Phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        return templateutil.hybriddict({b'url': url + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now