##// END OF EJS Templates
phabricator: use context manager form of progress in uploadchunks...
Ian Moody -
r43810:a78a65c3 default
parent child Browse files
Show More
@@ -1,1651 +1,1650 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import base64
45 import contextlib
45 import contextlib
46 import hashlib
46 import hashlib
47 import itertools
47 import itertools
48 import json
48 import json
49 import mimetypes
49 import mimetypes
50 import operator
50 import operator
51 import re
51 import re
52
52
53 from mercurial.node import bin, nullid
53 from mercurial.node import bin, nullid
54 from mercurial.i18n import _
54 from mercurial.i18n import _
55 from mercurial.pycompat import getattr
55 from mercurial.pycompat import getattr
56 from mercurial.thirdparty import attr
56 from mercurial.thirdparty import attr
57 from mercurial import (
57 from mercurial import (
58 cmdutil,
58 cmdutil,
59 context,
59 context,
60 encoding,
60 encoding,
61 error,
61 error,
62 exthelper,
62 exthelper,
63 httpconnection as httpconnectionmod,
63 httpconnection as httpconnectionmod,
64 match,
64 match,
65 mdiff,
65 mdiff,
66 obsutil,
66 obsutil,
67 parser,
67 parser,
68 patch,
68 patch,
69 phases,
69 phases,
70 pycompat,
70 pycompat,
71 scmutil,
71 scmutil,
72 smartset,
72 smartset,
73 tags,
73 tags,
74 templatefilters,
74 templatefilters,
75 templateutil,
75 templateutil,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 )
78 )
79 from mercurial.utils import (
79 from mercurial.utils import (
80 procutil,
80 procutil,
81 stringutil,
81 stringutil,
82 )
82 )
83
83
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
84 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
85 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
86 # be specifying the version(s) of Mercurial they are tested with, or
86 # be specifying the version(s) of Mercurial they are tested with, or
87 # leave the attribute unspecified.
87 # leave the attribute unspecified.
88 testedwith = b'ships-with-hg-core'
88 testedwith = b'ships-with-hg-core'
89
89
90 eh = exthelper.exthelper()
90 eh = exthelper.exthelper()
91
91
92 cmdtable = eh.cmdtable
92 cmdtable = eh.cmdtable
93 command = eh.command
93 command = eh.command
94 configtable = eh.configtable
94 configtable = eh.configtable
95 templatekeyword = eh.templatekeyword
95 templatekeyword = eh.templatekeyword
96
96
97 # developer config: phabricator.batchsize
97 # developer config: phabricator.batchsize
98 eh.configitem(
98 eh.configitem(
99 b'phabricator', b'batchsize', default=12,
99 b'phabricator', b'batchsize', default=12,
100 )
100 )
101 eh.configitem(
101 eh.configitem(
102 b'phabricator', b'callsign', default=None,
102 b'phabricator', b'callsign', default=None,
103 )
103 )
104 eh.configitem(
104 eh.configitem(
105 b'phabricator', b'curlcmd', default=None,
105 b'phabricator', b'curlcmd', default=None,
106 )
106 )
107 # developer config: phabricator.repophid
107 # developer config: phabricator.repophid
108 eh.configitem(
108 eh.configitem(
109 b'phabricator', b'repophid', default=None,
109 b'phabricator', b'repophid', default=None,
110 )
110 )
111 eh.configitem(
111 eh.configitem(
112 b'phabricator', b'url', default=None,
112 b'phabricator', b'url', default=None,
113 )
113 )
114 eh.configitem(
114 eh.configitem(
115 b'phabsend', b'confirm', default=False,
115 b'phabsend', b'confirm', default=False,
116 )
116 )
117
117
118 colortable = {
118 colortable = {
119 b'phabricator.action.created': b'green',
119 b'phabricator.action.created': b'green',
120 b'phabricator.action.skipped': b'magenta',
120 b'phabricator.action.skipped': b'magenta',
121 b'phabricator.action.updated': b'magenta',
121 b'phabricator.action.updated': b'magenta',
122 b'phabricator.desc': b'',
122 b'phabricator.desc': b'',
123 b'phabricator.drev': b'bold',
123 b'phabricator.drev': b'bold',
124 b'phabricator.node': b'',
124 b'phabricator.node': b'',
125 }
125 }
126
126
127 _VCR_FLAGS = [
127 _VCR_FLAGS = [
128 (
128 (
129 b'',
129 b'',
130 b'test-vcr',
130 b'test-vcr',
131 b'',
131 b'',
132 _(
132 _(
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
133 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
134 b', otherwise will mock all http requests using the specified vcr file.'
134 b', otherwise will mock all http requests using the specified vcr file.'
135 b' (ADVANCED)'
135 b' (ADVANCED)'
136 ),
136 ),
137 ),
137 ),
138 ]
138 ]
139
139
140
140
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register *name* as a command whose HTTP traffic can be recorded.

    Behaves like ``@command`` but appends a hidden ``--test-vcr`` flag.
    When that flag names a cassette file, conduit HTTP requests made while
    the command runs are recorded to (or replayed from) that file via the
    ``vcr`` package; without the flag the command runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method and decoded body parameters
        # all agree.  JSON-valued parameters are compared structurally so
        # key-ordering differences do not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key, values in r1params.items():
            if key not in r2params:
                return False
            value = values[0]
            other = r2params[key][0]
            if value.startswith(b'{') and value.endswith(b'}'):
                # compare json payloads without worrying about ordering
                if pycompat.json_loads(value) != pycompat.json_loads(other):
                    return False
            elif other != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub the conduit API token before it is written to a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Never persist session cookies into a cassette.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr's imports misbehave under the demand importer
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
217
217
218
218
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        # Exact-type dispatch on purpose: dict subclasses (e.g. sortdict)
        # must fall through to the scalar branch, matching the original
        # {list: ..., dict: ...}.get(type(obj)) behaviour.
        kind = type(obj)
        if kind is list:
            items = [(b'%d' % idx, elem) for idx, elem in enumerate(obj)]
        elif kind is dict:
            items = obj.items()
        else:
            items = None
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items:
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
244
244
245
245
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Find the [auth] group matching the conduit URL and pull its token.
    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
274
274
275
275
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Copy before injecting the token so the caller's dict is untouched.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to the user-configured curl, feeding form data on stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default path: use the builtin opener with the [auth] credentials.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
319
319
320
320
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+, and only produces unicode
    # strings, so convert at both boundaries.
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        # unicode -> local (bytes) strings for internal consumption
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def fromlocal(x):
        # bytes -> unicode strings, since json.dumps only accepts unicode
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
344
344
345
345
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache the looked-up PHID in config so later calls return early.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
365
365
366
366
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
367 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
368 _differentialrevisiondescre = re.compile(
368 _differentialrevisiondescre = re.compile(
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
369 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
370 )
370 )
371
371
372
372
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        # A predecessor carrying a local tag like "D123" suggests this node
        # was sent before; queue it for verification against Phabricator.
        for prec in precnodes:
            if prec not in nodemap:
                continue
            for tag in unfi.nodetags(prec):
                matched = _differentialrevisiontagre.match(tag)
                if matched:
                    toconfirm[node] = (
                        0,
                        set(precnodes),
                        int(matched.group(1)),
                    )
                    continue

        # A "Differential Revision:" line in the commit message wins over
        # tag-derived associations and sets force=1 (skips the overlap check
        # below).
        matched = _differentialrevisiondescre.search(ctx.description())
        if matched:
            toconfirm[node] = (1, set(precnodes), int(matched.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnode(d):
            # node recorded in the diff's metadata, or None when absent
            return bin(getdiffmeta(d).get(b'node', b'')) or None

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
463
463
464
464
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    chunks = []
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        chunks.append(chunk)
    return b''.join(chunks)
473
473
474
474
class DiffChangeType(object):
    """Numeric codes describing what happened to a file in a diff.

    Presumably these mirror Phabricator's DifferentialChangeType constants
    (the names suggest so) — confirm against the conduit API if extending.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
484
484
485
485
class DiffFileType(object):
    """Numeric codes for a file's content kind within a diff."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
490
490
491
491
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Attribute order is significant: @attr.s derives the positional
    # __init__ from it.  camelCase names are required by the wire format.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
505
505
506
506
507 @attr.s
507 @attr.s
508 class phabchange(object):
508 class phabchange(object):
509 """Represents a Differential change, owns Differential hunks and owned by a
509 """Represents a Differential change, owns Differential hunks and owned by a
510 Differential diff. Each one represents one file in a diff.
510 Differential diff. Each one represents one file in a diff.
511 """
511 """
512
512
513 currentPath = attr.ib(default=None) # camelcase-required
513 currentPath = attr.ib(default=None) # camelcase-required
514 oldPath = attr.ib(default=None) # camelcase-required
514 oldPath = attr.ib(default=None) # camelcase-required
515 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
515 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
516 metadata = attr.ib(default=attr.Factory(dict))
516 metadata = attr.ib(default=attr.Factory(dict))
517 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
517 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
518 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
518 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
519 type = attr.ib(default=DiffChangeType.CHANGE)
519 type = attr.ib(default=DiffChangeType.CHANGE)
520 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
520 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
521 commitHash = attr.ib(default=None) # camelcase-required
521 commitHash = attr.ib(default=None) # camelcase-required
522 addLines = attr.ib(default=0) # camelcase-required
522 addLines = attr.ib(default=0) # camelcase-required
523 delLines = attr.ib(default=0) # camelcase-required
523 delLines = attr.ib(default=0) # camelcase-required
524 hunks = attr.ib(default=attr.Factory(list))
524 hunks = attr.ib(default=attr.Factory(list))
525
525
526 def copynewmetadatatoold(self):
526 def copynewmetadatatoold(self):
527 for key in list(self.metadata.keys()):
527 for key in list(self.metadata.keys()):
528 newkey = key.replace(b'new:', b'old:')
528 newkey = key.replace(b'new:', b'old:')
529 self.metadata[newkey] = self.metadata[key]
529 self.metadata[newkey] = self.metadata[key]
530
530
531 def addoldmode(self, value):
531 def addoldmode(self, value):
532 self.oldProperties[b'unix:filemode'] = value
532 self.oldProperties[b'unix:filemode'] = value
533
533
    def addnewmode(self, value):
        """Record *value* as the file's new unix permission mode."""
        self.newProperties[b'unix:filemode'] = value
536
536
537 def addhunk(self, hunk):
537 def addhunk(self, hunk):
538 if not isinstance(hunk, phabhunk):
538 if not isinstance(hunk, phabhunk):
539 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
539 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
540 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
540 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
541 # It's useful to include these stats since the Phab web UI shows them,
541 # It's useful to include these stats since the Phab web UI shows them,
542 # and uses them to estimate how large a change a Revision is. Also used
542 # and uses them to estimate how large a change a Revision is. Also used
543 # in email subjects for the [+++--] bit.
543 # in email subjects for the [+++--] bit.
544 self.addLines += hunk.addLines
544 self.addLines += hunk.addLines
545 self.delLines += hunk.delLines
545 self.delLines += hunk.delLines
546
546
547
547
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Fields marked ``camelcase-required`` must keep their camelCase spelling
    because they are serialized verbatim as Conduit API parameters.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: change-dict} mapping, populated through addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the file's current path.

        Raises error.Abort when *change* is not a phabchange instance.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
574
574
575
575
def maketext(pchange, ctx, fname):
    """Populate *pchange* with hunks for the text file *fname* in *ctx*.

    A very large context (32767 lines) is requested so that the whole file
    body ends up inside the generated hunks.
    """
    repo = ctx.repo()
    matcher = match.exact([fname])
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, hunkiter = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in hunkiter:
        oldOffset, oldLength, newOffset, newLength = ranges
        # The first line of the hunk is skipped; only the body is sent.
        corpus = b''.join(lines[1:])

        # Run the diffstat machinery over header + hunk to obtain the
        # added/deleted line counts for this hunk.
        stats = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(list(header) + list(lines)))
        )
        _mf, _mt, addLines, delLines, _hb = stats
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
604
604
605
605
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    # Read the file content once up front instead of calling fctx.data()
    # for every chunk, which would materialize the whole file repeatedly.
    data = fctx.data()
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            # Count every chunk, including ones the server already has.
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(data[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
632
631
633
632
def uploadfile(fctx):
    """Upload a binary file to Phabricator and return its file PHID.

    Raises error.Abort when no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': size,
            b'contentHash': fhash,
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID plus an upload flag means chunked storage was allocated.
            uploadchunks(fctx, fphid)
        else:
            # Small enough for a single-shot upload.
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
669
668
670
669
def addoldbinary(pchange, fctx, originalfname):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version
    """
    oldfctx = fctx.p1()[originalfname]
    if not fctx.cmp(oldfctx):
        # Contents are unchanged, so nothing needs uploading.  If it's left
        # as IMAGE/BINARY web UI might try to display it.
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ, add the old one
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
692
691
693
692
def makebinary(pchange, fctx):
    """Populate *pchange* for a binary file: upload it and record metadata."""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    guessed, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if guessed:
        mime = pycompat.bytestr(guessed)
        pchange.metadata[b'new:file:mime-type'] = mime
        if mime.startswith(b'image/'):
            # Images get their own file type in the Phab UI
            pchange.fileType = DiffFileType.IMAGE
706
705
707
706
# Copied from mercurial/patch.py
# Maps an hg file flag (b'l' symlink, b'x' executable, b'' regular file)
# to the corresponding git mode string used in diff metadata.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
710
709
711
710
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        # Both the current content and (when present) the first parent's
        # content must decode cleanly for the file to count as text.
        fctx.data().decode('utf-8')
        if fctx.parents():
            fctx.p1().data().decode('utf-8')
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
727
726
728
727
def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        # Look up the parent filectx once; the original code performed the
        # ctx.p1()[fname] manifest lookup twice for the same file.
        fctx = ctx.p1()[fname]
        pchange.addoldmode(gitmode[fctx.flags()])
        if not (fctx.isbinary() or notutf8(fctx)):
            # Text files carry the full hunk content of the deletion.
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)
741
740
742
741
def addmodified(pdiff, ctx, modified):
    """Append a phabchange to *pdiff* for every file in *modified*."""
    for fname in modified:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[ctx[fname].flags()]
        oldmode = gitmode[ctx.p1()[fname].flags()]
        if newmode != oldmode:
            # Only record modes when the permission bits changed.
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        if not (fctx.isbinary() or notutf8(fctx)):
            maketext(pchange, ctx, fname)
        else:
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx, fname)

        pdiff.addchange(pchange)
761
760
762
761
def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            originalmode = gitmode[ctx.p1()[originalfname].flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # The source file is gone: this is a move.  Record the
                # MOVE_AWAY side for the source and take it out of
                # ``removed`` so addremoved() won't also report it.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # The source was already recorded as moved elsewhere; a
                # move plus additional copies becomes MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if fctx.isbinary() or notutf8(fctx):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, fctx, originalfname)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    # Emit the AWAY-side changes after all HERE-side changes are recorded.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
826
825
827
826
def creatediff(ctx):
    """Create a "Differential Diff" on Phabricator for *ctx* and return it.

    Raises error.Abort when the Conduit call yields no diff.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid

    params = pycompat.byteskwargs(attr.asdict(pdiff))
    diff = callconduit(repo.ui, b'differential.creatediff', params)
    if diff:
        return diff
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
853
852
854
853
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))

    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }

    # ``hg:meta`` first, then ``local:commits`` — one setdiffproperty call
    # apiece, in the same order the properties were written historically.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        callconduit(
            ctx.repo().ui,
            b'differential.setdiffproperty',
            {
                b'diff_id': diffid,
                b'name': name,
                b'data': templatefilters.json(data),
            },
        )
891
890
892
891
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.  When the content
    is unchanged, olddiff is reused instead of creating a new diff.

    If actions is not None, they will be appended to the transaction.

    Returns a (revision, diff) pair: the edited revision from Conduit and the
    diff (new or reused) attached to it.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        # Compare full-context diffs to decide whether a new diff is needed
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            # The comment is only attached when a new/updated diff is sent
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
962
961
963
962
def userphids(repo, names):
    """convert user names to PHIDs"""
    wanted = [name.lower() for name in names]
    result = callconduit(
        repo.ui, b'user.search', {b'constraints': {b'usernames': wanted}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(wanted) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
979
978
980
979
981 @vcrcommand(
980 @vcrcommand(
982 b'phabsend',
981 b'phabsend',
983 [
982 [
984 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
983 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
985 (b'', b'amend', True, _(b'update commit messages')),
984 (b'', b'amend', True, _(b'update commit messages')),
986 (b'', b'reviewer', [], _(b'specify reviewers')),
985 (b'', b'reviewer', [], _(b'specify reviewers')),
987 (b'', b'blocker', [], _(b'specify blocking reviewers')),
986 (b'', b'blocker', [], _(b'specify blocking reviewers')),
988 (
987 (
989 b'm',
988 b'm',
990 b'comment',
989 b'comment',
991 b'',
990 b'',
992 _(b'add a comment to Revisions with new/updated Diffs'),
991 _(b'add a comment to Revisions with new/updated Diffs'),
993 ),
992 ),
994 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
993 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
995 ],
994 ],
996 _(b'REV [OPTIONS]'),
995 _(b'REV [OPTIONS]'),
997 helpcategory=command.CATEGORY_IMPORT_EXPORT,
996 helpcategory=command.CATEGORY_IMPORT_EXPORT,
998 )
997 )
999 def phabsend(ui, repo, *revs, **opts):
998 def phabsend(ui, repo, *revs, **opts):
1000 """upload changesets to Phabricator
999 """upload changesets to Phabricator
1001
1000
1002 If there are multiple revisions specified, they will be send as a stack
1001 If there are multiple revisions specified, they will be send as a stack
1003 with a linear dependencies relationship using the order specified by the
1002 with a linear dependencies relationship using the order specified by the
1004 revset.
1003 revset.
1005
1004
1006 For the first time uploading changesets, local tags will be created to
1005 For the first time uploading changesets, local tags will be created to
1007 maintain the association. After the first time, phabsend will check
1006 maintain the association. After the first time, phabsend will check
1008 obsstore and tags information so it can figure out whether to update an
1007 obsstore and tags information so it can figure out whether to update an
1009 existing Differential Revision, or create a new one.
1008 existing Differential Revision, or create a new one.
1010
1009
1011 If --amend is set, update commit messages so they have the
1010 If --amend is set, update commit messages so they have the
1012 ``Differential Revision`` URL, remove related tags. This is similar to what
1011 ``Differential Revision`` URL, remove related tags. This is similar to what
1013 arcanist will do, and is more desired in author-push workflows. Otherwise,
1012 arcanist will do, and is more desired in author-push workflows. Otherwise,
1014 use local tags to record the ``Differential Revision`` association.
1013 use local tags to record the ``Differential Revision`` association.
1015
1014
1016 The --confirm option lets you confirm changesets before sending them. You
1015 The --confirm option lets you confirm changesets before sending them. You
1017 can also add following to your configuration file to make it default
1016 can also add following to your configuration file to make it default
1018 behaviour::
1017 behaviour::
1019
1018
1020 [phabsend]
1019 [phabsend]
1021 confirm = true
1020 confirm = true
1022
1021
1023 phabsend will check obsstore and the above association to decide whether to
1022 phabsend will check obsstore and the above association to decide whether to
1024 update an existing Differential Revision, or create a new one.
1023 update an existing Differential Revision, or create a new one.
1025 """
1024 """
1026 opts = pycompat.byteskwargs(opts)
1025 opts = pycompat.byteskwargs(opts)
1027 revs = list(revs) + opts.get(b'rev', [])
1026 revs = list(revs) + opts.get(b'rev', [])
1028 revs = scmutil.revrange(repo, revs)
1027 revs = scmutil.revrange(repo, revs)
1029
1028
1030 if not revs:
1029 if not revs:
1031 raise error.Abort(_(b'phabsend requires at least one changeset'))
1030 raise error.Abort(_(b'phabsend requires at least one changeset'))
1032 if opts.get(b'amend'):
1031 if opts.get(b'amend'):
1033 cmdutil.checkunfinished(repo)
1032 cmdutil.checkunfinished(repo)
1034
1033
1035 # {newnode: (oldnode, olddiff, olddrev}
1034 # {newnode: (oldnode, olddiff, olddrev}
1036 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1035 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1037
1036
1038 confirm = ui.configbool(b'phabsend', b'confirm')
1037 confirm = ui.configbool(b'phabsend', b'confirm')
1039 confirm |= bool(opts.get(b'confirm'))
1038 confirm |= bool(opts.get(b'confirm'))
1040 if confirm:
1039 if confirm:
1041 confirmed = _confirmbeforesend(repo, revs, oldmap)
1040 confirmed = _confirmbeforesend(repo, revs, oldmap)
1042 if not confirmed:
1041 if not confirmed:
1043 raise error.Abort(_(b'phabsend cancelled'))
1042 raise error.Abort(_(b'phabsend cancelled'))
1044
1043
1045 actions = []
1044 actions = []
1046 reviewers = opts.get(b'reviewer', [])
1045 reviewers = opts.get(b'reviewer', [])
1047 blockers = opts.get(b'blocker', [])
1046 blockers = opts.get(b'blocker', [])
1048 phids = []
1047 phids = []
1049 if reviewers:
1048 if reviewers:
1050 phids.extend(userphids(repo, reviewers))
1049 phids.extend(userphids(repo, reviewers))
1051 if blockers:
1050 if blockers:
1052 phids.extend(
1051 phids.extend(
1053 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1052 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1054 )
1053 )
1055 if phids:
1054 if phids:
1056 actions.append({b'type': b'reviewers.add', b'value': phids})
1055 actions.append({b'type': b'reviewers.add', b'value': phids})
1057
1056
1058 drevids = [] # [int]
1057 drevids = [] # [int]
1059 diffmap = {} # {newnode: diff}
1058 diffmap = {} # {newnode: diff}
1060
1059
1061 # Send patches one by one so we know their Differential Revision PHIDs and
1060 # Send patches one by one so we know their Differential Revision PHIDs and
1062 # can provide dependency relationship
1061 # can provide dependency relationship
1063 lastrevphid = None
1062 lastrevphid = None
1064 for rev in revs:
1063 for rev in revs:
1065 ui.debug(b'sending rev %d\n' % rev)
1064 ui.debug(b'sending rev %d\n' % rev)
1066 ctx = repo[rev]
1065 ctx = repo[rev]
1067
1066
1068 # Get Differential Revision ID
1067 # Get Differential Revision ID
1069 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1068 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1070 if oldnode != ctx.node() or opts.get(b'amend'):
1069 if oldnode != ctx.node() or opts.get(b'amend'):
1071 # Create or update Differential Revision
1070 # Create or update Differential Revision
1072 revision, diff = createdifferentialrevision(
1071 revision, diff = createdifferentialrevision(
1073 ctx,
1072 ctx,
1074 revid,
1073 revid,
1075 lastrevphid,
1074 lastrevphid,
1076 oldnode,
1075 oldnode,
1077 olddiff,
1076 olddiff,
1078 actions,
1077 actions,
1079 opts.get(b'comment'),
1078 opts.get(b'comment'),
1080 )
1079 )
1081 diffmap[ctx.node()] = diff
1080 diffmap[ctx.node()] = diff
1082 newrevid = int(revision[b'object'][b'id'])
1081 newrevid = int(revision[b'object'][b'id'])
1083 newrevphid = revision[b'object'][b'phid']
1082 newrevphid = revision[b'object'][b'phid']
1084 if revid:
1083 if revid:
1085 action = b'updated'
1084 action = b'updated'
1086 else:
1085 else:
1087 action = b'created'
1086 action = b'created'
1088
1087
1089 # Create a local tag to note the association, if commit message
1088 # Create a local tag to note the association, if commit message
1090 # does not have it already
1089 # does not have it already
1091 m = _differentialrevisiondescre.search(ctx.description())
1090 m = _differentialrevisiondescre.search(ctx.description())
1092 if not m or int(m.group(r'id')) != newrevid:
1091 if not m or int(m.group(r'id')) != newrevid:
1093 tagname = b'D%d' % newrevid
1092 tagname = b'D%d' % newrevid
1094 tags.tag(
1093 tags.tag(
1095 repo,
1094 repo,
1096 tagname,
1095 tagname,
1097 ctx.node(),
1096 ctx.node(),
1098 message=None,
1097 message=None,
1099 user=None,
1098 user=None,
1100 date=None,
1099 date=None,
1101 local=True,
1100 local=True,
1102 )
1101 )
1103 else:
1102 else:
1104 # Nothing changed. But still set "newrevphid" so the next revision
1103 # Nothing changed. But still set "newrevphid" so the next revision
1105 # could depend on this one and "newrevid" for the summary line.
1104 # could depend on this one and "newrevid" for the summary line.
1106 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1105 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1107 newrevid = revid
1106 newrevid = revid
1108 action = b'skipped'
1107 action = b'skipped'
1109
1108
1110 actiondesc = ui.label(
1109 actiondesc = ui.label(
1111 {
1110 {
1112 b'created': _(b'created'),
1111 b'created': _(b'created'),
1113 b'skipped': _(b'skipped'),
1112 b'skipped': _(b'skipped'),
1114 b'updated': _(b'updated'),
1113 b'updated': _(b'updated'),
1115 }[action],
1114 }[action],
1116 b'phabricator.action.%s' % action,
1115 b'phabricator.action.%s' % action,
1117 )
1116 )
1118 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1117 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1119 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1118 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1120 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1119 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1121 ui.write(
1120 ui.write(
1122 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1121 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1123 )
1122 )
1124 drevids.append(newrevid)
1123 drevids.append(newrevid)
1125 lastrevphid = newrevphid
1124 lastrevphid = newrevphid
1126
1125
1127 # Update commit messages and remove tags
1126 # Update commit messages and remove tags
1128 if opts.get(b'amend'):
1127 if opts.get(b'amend'):
1129 unfi = repo.unfiltered()
1128 unfi = repo.unfiltered()
1130 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1129 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1131 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1130 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1132 wnode = unfi[b'.'].node()
1131 wnode = unfi[b'.'].node()
1133 mapping = {} # {oldnode: [newnode]}
1132 mapping = {} # {oldnode: [newnode]}
1134 for i, rev in enumerate(revs):
1133 for i, rev in enumerate(revs):
1135 old = unfi[rev]
1134 old = unfi[rev]
1136 drevid = drevids[i]
1135 drevid = drevids[i]
1137 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1136 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1138 newdesc = getdescfromdrev(drev)
1137 newdesc = getdescfromdrev(drev)
1139 # Make sure commit message contain "Differential Revision"
1138 # Make sure commit message contain "Differential Revision"
1140 if old.description() != newdesc:
1139 if old.description() != newdesc:
1141 if old.phase() == phases.public:
1140 if old.phase() == phases.public:
1142 ui.warn(
1141 ui.warn(
1143 _(b"warning: not updating public commit %s\n")
1142 _(b"warning: not updating public commit %s\n")
1144 % scmutil.formatchangeid(old)
1143 % scmutil.formatchangeid(old)
1145 )
1144 )
1146 continue
1145 continue
1147 parents = [
1146 parents = [
1148 mapping.get(old.p1().node(), (old.p1(),))[0],
1147 mapping.get(old.p1().node(), (old.p1(),))[0],
1149 mapping.get(old.p2().node(), (old.p2(),))[0],
1148 mapping.get(old.p2().node(), (old.p2(),))[0],
1150 ]
1149 ]
1151 new = context.metadataonlyctx(
1150 new = context.metadataonlyctx(
1152 repo,
1151 repo,
1153 old,
1152 old,
1154 parents=parents,
1153 parents=parents,
1155 text=newdesc,
1154 text=newdesc,
1156 user=old.user(),
1155 user=old.user(),
1157 date=old.date(),
1156 date=old.date(),
1158 extra=old.extra(),
1157 extra=old.extra(),
1159 )
1158 )
1160
1159
1161 newnode = new.commit()
1160 newnode = new.commit()
1162
1161
1163 mapping[old.node()] = [newnode]
1162 mapping[old.node()] = [newnode]
1164 # Update diff property
1163 # Update diff property
1165 # If it fails just warn and keep going, otherwise the DREV
1164 # If it fails just warn and keep going, otherwise the DREV
1166 # associations will be lost
1165 # associations will be lost
1167 try:
1166 try:
1168 writediffproperties(unfi[newnode], diffmap[old.node()])
1167 writediffproperties(unfi[newnode], diffmap[old.node()])
1169 except util.urlerr.urlerror:
1168 except util.urlerr.urlerror:
1170 ui.warnnoi18n(
1169 ui.warnnoi18n(
1171 b'Failed to update metadata for D%d\n' % drevid
1170 b'Failed to update metadata for D%d\n' % drevid
1172 )
1171 )
1173 # Remove local tags since it's no longer necessary
1172 # Remove local tags since it's no longer necessary
1174 tagname = b'D%d' % drevid
1173 tagname = b'D%d' % drevid
1175 if tagname in repo.tags():
1174 if tagname in repo.tags():
1176 tags.tag(
1175 tags.tag(
1177 repo,
1176 repo,
1178 tagname,
1177 tagname,
1179 nullid,
1178 nullid,
1180 message=None,
1179 message=None,
1181 user=None,
1180 user=None,
1182 date=None,
1181 date=None,
1183 local=True,
1182 local=True,
1184 )
1183 )
1185 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1184 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1186 if wnode in mapping:
1185 if wnode in mapping:
1187 unfi.setparents(mapping[wnode][0])
1186 unfi.setparents(mapping[wnode][0])
1188
1187
1189
1188
1190 # Map from "hg:meta" keys to header understood by "hg import". The order is
1189 # Map from "hg:meta" keys to header understood by "hg import". The order is
1191 # consistent with "hg export" output.
1190 # consistent with "hg export" output.
1192 _metanamemap = util.sortdict(
1191 _metanamemap = util.sortdict(
1193 [
1192 [
1194 (b'user', b'User'),
1193 (b'user', b'User'),
1195 (b'date', b'Date'),
1194 (b'date', b'Date'),
1196 (b'branch', b'Branch'),
1195 (b'branch', b'Branch'),
1197 (b'node', b'Node ID'),
1196 (b'node', b'Node ID'),
1198 (b'parent', b'Parent '),
1197 (b'parent', b'Parent '),
1199 ]
1198 ]
1200 )
1199 )
1201
1200
1202
1201
1203 def _confirmbeforesend(repo, revs, oldmap):
1202 def _confirmbeforesend(repo, revs, oldmap):
1204 url, token = readurltoken(repo.ui)
1203 url, token = readurltoken(repo.ui)
1205 ui = repo.ui
1204 ui = repo.ui
1206 for rev in revs:
1205 for rev in revs:
1207 ctx = repo[rev]
1206 ctx = repo[rev]
1208 desc = ctx.description().splitlines()[0]
1207 desc = ctx.description().splitlines()[0]
1209 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1208 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1210 if drevid:
1209 if drevid:
1211 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1210 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1212 else:
1211 else:
1213 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1212 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1214
1213
1215 ui.write(
1214 ui.write(
1216 _(b'%s - %s: %s\n')
1215 _(b'%s - %s: %s\n')
1217 % (
1216 % (
1218 drevdesc,
1217 drevdesc,
1219 ui.label(bytes(ctx), b'phabricator.node'),
1218 ui.label(bytes(ctx), b'phabricator.node'),
1220 ui.label(desc, b'phabricator.desc'),
1219 ui.label(desc, b'phabricator.desc'),
1221 )
1220 )
1222 )
1221 )
1223
1222
1224 if ui.promptchoice(
1223 if ui.promptchoice(
1225 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1224 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1226 ):
1225 ):
1227 return False
1226 return False
1228
1227
1229 return True
1228 return True
1230
1229
1231
1230
1232 _knownstatusnames = {
1231 _knownstatusnames = {
1233 b'accepted',
1232 b'accepted',
1234 b'needsreview',
1233 b'needsreview',
1235 b'needsrevision',
1234 b'needsrevision',
1236 b'closed',
1235 b'closed',
1237 b'abandoned',
1236 b'abandoned',
1238 }
1237 }
1239
1238
1240
1239
1241 def _getstatusname(drev):
1240 def _getstatusname(drev):
1242 """get normalized status name from a Differential Revision"""
1241 """get normalized status name from a Differential Revision"""
1243 return drev[b'statusName'].replace(b' ', b'').lower()
1242 return drev[b'statusName'].replace(b' ', b'').lower()
1244
1243
1245
1244
1246 # Small language to specify differential revisions. Support symbols: (), :X,
1245 # Small language to specify differential revisions. Support symbols: (), :X,
1247 # +, and -.
1246 # +, and -.
1248
1247
1249 _elements = {
1248 _elements = {
1250 # token-type: binding-strength, primary, prefix, infix, suffix
1249 # token-type: binding-strength, primary, prefix, infix, suffix
1251 b'(': (12, None, (b'group', 1, b')'), None, None),
1250 b'(': (12, None, (b'group', 1, b')'), None, None),
1252 b':': (8, None, (b'ancestors', 8), None, None),
1251 b':': (8, None, (b'ancestors', 8), None, None),
1253 b'&': (5, None, None, (b'and_', 5), None),
1252 b'&': (5, None, None, (b'and_', 5), None),
1254 b'+': (4, None, None, (b'add', 4), None),
1253 b'+': (4, None, None, (b'add', 4), None),
1255 b'-': (4, None, None, (b'sub', 4), None),
1254 b'-': (4, None, None, (b'sub', 4), None),
1256 b')': (0, None, None, None, None),
1255 b')': (0, None, None, None, None),
1257 b'symbol': (0, b'symbol', None, None, None),
1256 b'symbol': (0, b'symbol', None, None, None),
1258 b'end': (0, None, None, None, None),
1257 b'end': (0, None, None, None, None),
1259 }
1258 }
1260
1259
1261
1260
1262 def _tokenize(text):
1261 def _tokenize(text):
1263 view = memoryview(text) # zero-copy slice
1262 view = memoryview(text) # zero-copy slice
1264 special = b'():+-& '
1263 special = b'():+-& '
1265 pos = 0
1264 pos = 0
1266 length = len(text)
1265 length = len(text)
1267 while pos < length:
1266 while pos < length:
1268 symbol = b''.join(
1267 symbol = b''.join(
1269 itertools.takewhile(
1268 itertools.takewhile(
1270 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1269 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1271 )
1270 )
1272 )
1271 )
1273 if symbol:
1272 if symbol:
1274 yield (b'symbol', symbol, pos)
1273 yield (b'symbol', symbol, pos)
1275 pos += len(symbol)
1274 pos += len(symbol)
1276 else: # special char, ignore space
1275 else: # special char, ignore space
1277 if text[pos : pos + 1] != b' ':
1276 if text[pos : pos + 1] != b' ':
1278 yield (text[pos : pos + 1], None, pos)
1277 yield (text[pos : pos + 1], None, pos)
1279 pos += 1
1278 pos += 1
1280 yield (b'end', None, pos)
1279 yield (b'end', None, pos)
1281
1280
1282
1281
1283 def _parse(text):
1282 def _parse(text):
1284 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1283 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1285 if pos != len(text):
1284 if pos != len(text):
1286 raise error.ParseError(b'invalid token', pos)
1285 raise error.ParseError(b'invalid token', pos)
1287 return tree
1286 return tree
1288
1287
1289
1288
1290 def _parsedrev(symbol):
1289 def _parsedrev(symbol):
1291 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1290 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1292 if symbol.startswith(b'D') and symbol[1:].isdigit():
1291 if symbol.startswith(b'D') and symbol[1:].isdigit():
1293 return int(symbol[1:])
1292 return int(symbol[1:])
1294 if symbol.isdigit():
1293 if symbol.isdigit():
1295 return int(symbol)
1294 return int(symbol)
1296
1295
1297
1296
1298 def _prefetchdrevs(tree):
1297 def _prefetchdrevs(tree):
1299 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1298 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1300 drevs = set()
1299 drevs = set()
1301 ancestordrevs = set()
1300 ancestordrevs = set()
1302 op = tree[0]
1301 op = tree[0]
1303 if op == b'symbol':
1302 if op == b'symbol':
1304 r = _parsedrev(tree[1])
1303 r = _parsedrev(tree[1])
1305 if r:
1304 if r:
1306 drevs.add(r)
1305 drevs.add(r)
1307 elif op == b'ancestors':
1306 elif op == b'ancestors':
1308 r, a = _prefetchdrevs(tree[1])
1307 r, a = _prefetchdrevs(tree[1])
1309 drevs.update(r)
1308 drevs.update(r)
1310 ancestordrevs.update(r)
1309 ancestordrevs.update(r)
1311 ancestordrevs.update(a)
1310 ancestordrevs.update(a)
1312 else:
1311 else:
1313 for t in tree[1:]:
1312 for t in tree[1:]:
1314 r, a = _prefetchdrevs(t)
1313 r, a = _prefetchdrevs(t)
1315 drevs.update(r)
1314 drevs.update(r)
1316 ancestordrevs.update(a)
1315 ancestordrevs.update(a)
1317 return drevs, ancestordrevs
1316 return drevs, ancestordrevs
1318
1317
1319
1318
1320 def querydrev(repo, spec):
1319 def querydrev(repo, spec):
1321 """return a list of "Differential Revision" dicts
1320 """return a list of "Differential Revision" dicts
1322
1321
1323 spec is a string using a simple query language, see docstring in phabread
1322 spec is a string using a simple query language, see docstring in phabread
1324 for details.
1323 for details.
1325
1324
1326 A "Differential Revision dict" looks like:
1325 A "Differential Revision dict" looks like:
1327
1326
1328 {
1327 {
1329 "id": "2",
1328 "id": "2",
1330 "phid": "PHID-DREV-672qvysjcczopag46qty",
1329 "phid": "PHID-DREV-672qvysjcczopag46qty",
1331 "title": "example",
1330 "title": "example",
1332 "uri": "https://phab.example.com/D2",
1331 "uri": "https://phab.example.com/D2",
1333 "dateCreated": "1499181406",
1332 "dateCreated": "1499181406",
1334 "dateModified": "1499182103",
1333 "dateModified": "1499182103",
1335 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1334 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1336 "status": "0",
1335 "status": "0",
1337 "statusName": "Needs Review",
1336 "statusName": "Needs Review",
1338 "properties": [],
1337 "properties": [],
1339 "branch": null,
1338 "branch": null,
1340 "summary": "",
1339 "summary": "",
1341 "testPlan": "",
1340 "testPlan": "",
1342 "lineCount": "2",
1341 "lineCount": "2",
1343 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1342 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1344 "diffs": [
1343 "diffs": [
1345 "3",
1344 "3",
1346 "4",
1345 "4",
1347 ],
1346 ],
1348 "commits": [],
1347 "commits": [],
1349 "reviewers": [],
1348 "reviewers": [],
1350 "ccs": [],
1349 "ccs": [],
1351 "hashes": [],
1350 "hashes": [],
1352 "auxiliary": {
1351 "auxiliary": {
1353 "phabricator:projects": [],
1352 "phabricator:projects": [],
1354 "phabricator:depends-on": [
1353 "phabricator:depends-on": [
1355 "PHID-DREV-gbapp366kutjebt7agcd"
1354 "PHID-DREV-gbapp366kutjebt7agcd"
1356 ]
1355 ]
1357 },
1356 },
1358 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1357 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1359 "sourcePath": null
1358 "sourcePath": null
1360 }
1359 }
1361 """
1360 """
1362
1361
1363 def fetch(params):
1362 def fetch(params):
1364 """params -> single drev or None"""
1363 """params -> single drev or None"""
1365 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1364 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1366 if key in prefetched:
1365 if key in prefetched:
1367 return prefetched[key]
1366 return prefetched[key]
1368 drevs = callconduit(repo.ui, b'differential.query', params)
1367 drevs = callconduit(repo.ui, b'differential.query', params)
1369 # Fill prefetched with the result
1368 # Fill prefetched with the result
1370 for drev in drevs:
1369 for drev in drevs:
1371 prefetched[drev[b'phid']] = drev
1370 prefetched[drev[b'phid']] = drev
1372 prefetched[int(drev[b'id'])] = drev
1371 prefetched[int(drev[b'id'])] = drev
1373 if key not in prefetched:
1372 if key not in prefetched:
1374 raise error.Abort(
1373 raise error.Abort(
1375 _(b'cannot get Differential Revision %r') % params
1374 _(b'cannot get Differential Revision %r') % params
1376 )
1375 )
1377 return prefetched[key]
1376 return prefetched[key]
1378
1377
1379 def getstack(topdrevids):
1378 def getstack(topdrevids):
1380 """given a top, get a stack from the bottom, [id] -> [id]"""
1379 """given a top, get a stack from the bottom, [id] -> [id]"""
1381 visited = set()
1380 visited = set()
1382 result = []
1381 result = []
1383 queue = [{b'ids': [i]} for i in topdrevids]
1382 queue = [{b'ids': [i]} for i in topdrevids]
1384 while queue:
1383 while queue:
1385 params = queue.pop()
1384 params = queue.pop()
1386 drev = fetch(params)
1385 drev = fetch(params)
1387 if drev[b'id'] in visited:
1386 if drev[b'id'] in visited:
1388 continue
1387 continue
1389 visited.add(drev[b'id'])
1388 visited.add(drev[b'id'])
1390 result.append(int(drev[b'id']))
1389 result.append(int(drev[b'id']))
1391 auxiliary = drev.get(b'auxiliary', {})
1390 auxiliary = drev.get(b'auxiliary', {})
1392 depends = auxiliary.get(b'phabricator:depends-on', [])
1391 depends = auxiliary.get(b'phabricator:depends-on', [])
1393 for phid in depends:
1392 for phid in depends:
1394 queue.append({b'phids': [phid]})
1393 queue.append({b'phids': [phid]})
1395 result.reverse()
1394 result.reverse()
1396 return smartset.baseset(result)
1395 return smartset.baseset(result)
1397
1396
1398 # Initialize prefetch cache
1397 # Initialize prefetch cache
1399 prefetched = {} # {id or phid: drev}
1398 prefetched = {} # {id or phid: drev}
1400
1399
1401 tree = _parse(spec)
1400 tree = _parse(spec)
1402 drevs, ancestordrevs = _prefetchdrevs(tree)
1401 drevs, ancestordrevs = _prefetchdrevs(tree)
1403
1402
1404 # developer config: phabricator.batchsize
1403 # developer config: phabricator.batchsize
1405 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1404 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1406
1405
1407 # Prefetch Differential Revisions in batch
1406 # Prefetch Differential Revisions in batch
1408 tofetch = set(drevs)
1407 tofetch = set(drevs)
1409 for r in ancestordrevs:
1408 for r in ancestordrevs:
1410 tofetch.update(range(max(1, r - batchsize), r + 1))
1409 tofetch.update(range(max(1, r - batchsize), r + 1))
1411 if drevs:
1410 if drevs:
1412 fetch({b'ids': list(tofetch)})
1411 fetch({b'ids': list(tofetch)})
1413 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1412 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1414
1413
1415 # Walk through the tree, return smartsets
1414 # Walk through the tree, return smartsets
1416 def walk(tree):
1415 def walk(tree):
1417 op = tree[0]
1416 op = tree[0]
1418 if op == b'symbol':
1417 if op == b'symbol':
1419 drev = _parsedrev(tree[1])
1418 drev = _parsedrev(tree[1])
1420 if drev:
1419 if drev:
1421 return smartset.baseset([drev])
1420 return smartset.baseset([drev])
1422 elif tree[1] in _knownstatusnames:
1421 elif tree[1] in _knownstatusnames:
1423 drevs = [
1422 drevs = [
1424 r
1423 r
1425 for r in validids
1424 for r in validids
1426 if _getstatusname(prefetched[r]) == tree[1]
1425 if _getstatusname(prefetched[r]) == tree[1]
1427 ]
1426 ]
1428 return smartset.baseset(drevs)
1427 return smartset.baseset(drevs)
1429 else:
1428 else:
1430 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1429 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1431 elif op in {b'and_', b'add', b'sub'}:
1430 elif op in {b'and_', b'add', b'sub'}:
1432 assert len(tree) == 3
1431 assert len(tree) == 3
1433 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1432 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1434 elif op == b'group':
1433 elif op == b'group':
1435 return walk(tree[1])
1434 return walk(tree[1])
1436 elif op == b'ancestors':
1435 elif op == b'ancestors':
1437 return getstack(walk(tree[1]))
1436 return getstack(walk(tree[1]))
1438 else:
1437 else:
1439 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1438 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1440
1439
1441 return [prefetched[r] for r in walk(tree)]
1440 return [prefetched[r] for r in walk(tree)]
1442
1441
1443
1442
1444 def getdescfromdrev(drev):
1443 def getdescfromdrev(drev):
1445 """get description (commit message) from "Differential Revision"
1444 """get description (commit message) from "Differential Revision"
1446
1445
1447 This is similar to differential.getcommitmessage API. But we only care
1446 This is similar to differential.getcommitmessage API. But we only care
1448 about limited fields: title, summary, test plan, and URL.
1447 about limited fields: title, summary, test plan, and URL.
1449 """
1448 """
1450 title = drev[b'title']
1449 title = drev[b'title']
1451 summary = drev[b'summary'].rstrip()
1450 summary = drev[b'summary'].rstrip()
1452 testplan = drev[b'testPlan'].rstrip()
1451 testplan = drev[b'testPlan'].rstrip()
1453 if testplan:
1452 if testplan:
1454 testplan = b'Test Plan:\n%s' % testplan
1453 testplan = b'Test Plan:\n%s' % testplan
1455 uri = b'Differential Revision: %s' % drev[b'uri']
1454 uri = b'Differential Revision: %s' % drev[b'uri']
1456 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1455 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1457
1456
1458
1457
1459 def getdiffmeta(diff):
1458 def getdiffmeta(diff):
1460 """get commit metadata (date, node, user, p1) from a diff object
1459 """get commit metadata (date, node, user, p1) from a diff object
1461
1460
1462 The metadata could be "hg:meta", sent by phabsend, like:
1461 The metadata could be "hg:meta", sent by phabsend, like:
1463
1462
1464 "properties": {
1463 "properties": {
1465 "hg:meta": {
1464 "hg:meta": {
1466 "date": "1499571514 25200",
1465 "date": "1499571514 25200",
1467 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1466 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1468 "user": "Foo Bar <foo@example.com>",
1467 "user": "Foo Bar <foo@example.com>",
1469 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1468 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1470 }
1469 }
1471 }
1470 }
1472
1471
1473 Or converted from "local:commits", sent by "arc", like:
1472 Or converted from "local:commits", sent by "arc", like:
1474
1473
1475 "properties": {
1474 "properties": {
1476 "local:commits": {
1475 "local:commits": {
1477 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1476 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1478 "author": "Foo Bar",
1477 "author": "Foo Bar",
1479 "time": 1499546314,
1478 "time": 1499546314,
1480 "branch": "default",
1479 "branch": "default",
1481 "tag": "",
1480 "tag": "",
1482 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1481 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1483 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1482 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1484 "local": "1000",
1483 "local": "1000",
1485 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1484 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1486 "summary": "...",
1485 "summary": "...",
1487 "message": "...",
1486 "message": "...",
1488 "authorEmail": "foo@example.com"
1487 "authorEmail": "foo@example.com"
1489 }
1488 }
1490 }
1489 }
1491 }
1490 }
1492
1491
1493 Note: metadata extracted from "local:commits" will lose time zone
1492 Note: metadata extracted from "local:commits" will lose time zone
1494 information.
1493 information.
1495 """
1494 """
1496 props = diff.get(b'properties') or {}
1495 props = diff.get(b'properties') or {}
1497 meta = props.get(b'hg:meta')
1496 meta = props.get(b'hg:meta')
1498 if not meta:
1497 if not meta:
1499 if props.get(b'local:commits'):
1498 if props.get(b'local:commits'):
1500 commit = sorted(props[b'local:commits'].values())[0]
1499 commit = sorted(props[b'local:commits'].values())[0]
1501 meta = {}
1500 meta = {}
1502 if b'author' in commit and b'authorEmail' in commit:
1501 if b'author' in commit and b'authorEmail' in commit:
1503 meta[b'user'] = b'%s <%s>' % (
1502 meta[b'user'] = b'%s <%s>' % (
1504 commit[b'author'],
1503 commit[b'author'],
1505 commit[b'authorEmail'],
1504 commit[b'authorEmail'],
1506 )
1505 )
1507 if b'time' in commit:
1506 if b'time' in commit:
1508 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1507 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1509 if b'branch' in commit:
1508 if b'branch' in commit:
1510 meta[b'branch'] = commit[b'branch']
1509 meta[b'branch'] = commit[b'branch']
1511 node = commit.get(b'commit', commit.get(b'rev'))
1510 node = commit.get(b'commit', commit.get(b'rev'))
1512 if node:
1511 if node:
1513 meta[b'node'] = node
1512 meta[b'node'] = node
1514 if len(commit.get(b'parents', ())) >= 1:
1513 if len(commit.get(b'parents', ())) >= 1:
1515 meta[b'parent'] = commit[b'parents'][0]
1514 meta[b'parent'] = commit[b'parents'][0]
1516 else:
1515 else:
1517 meta = {}
1516 meta = {}
1518 if b'date' not in meta and b'dateCreated' in diff:
1517 if b'date' not in meta and b'dateCreated' in diff:
1519 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1518 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1520 if b'branch' not in meta and diff.get(b'branch'):
1519 if b'branch' not in meta and diff.get(b'branch'):
1521 meta[b'branch'] = diff[b'branch']
1520 meta[b'branch'] = diff[b'branch']
1522 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1521 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1523 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1522 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1524 return meta
1523 return meta
1525
1524
1526
1525
1527 def readpatch(repo, drevs, write):
1526 def readpatch(repo, drevs, write):
1528 """generate plain-text patch readable by 'hg import'
1527 """generate plain-text patch readable by 'hg import'
1529
1528
1530 write is usually ui.write. drevs is what "querydrev" returns, results of
1529 write is usually ui.write. drevs is what "querydrev" returns, results of
1531 "differential.query".
1530 "differential.query".
1532 """
1531 """
1533 # Prefetch hg:meta property for all diffs
1532 # Prefetch hg:meta property for all diffs
1534 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1533 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1535 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1534 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1536
1535
1537 # Generate patch for each drev
1536 # Generate patch for each drev
1538 for drev in drevs:
1537 for drev in drevs:
1539 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1538 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1540
1539
1541 diffid = max(int(v) for v in drev[b'diffs'])
1540 diffid = max(int(v) for v in drev[b'diffs'])
1542 body = callconduit(
1541 body = callconduit(
1543 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1542 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1544 )
1543 )
1545 desc = getdescfromdrev(drev)
1544 desc = getdescfromdrev(drev)
1546 header = b'# HG changeset patch\n'
1545 header = b'# HG changeset patch\n'
1547
1546
1548 # Try to preserve metadata from hg:meta property. Write hg patch
1547 # Try to preserve metadata from hg:meta property. Write hg patch
1549 # headers that can be read by the "import" command. See patchheadermap
1548 # headers that can be read by the "import" command. See patchheadermap
1550 # and extract in mercurial/patch.py for supported headers.
1549 # and extract in mercurial/patch.py for supported headers.
1551 meta = getdiffmeta(diffs[b'%d' % diffid])
1550 meta = getdiffmeta(diffs[b'%d' % diffid])
1552 for k in _metanamemap.keys():
1551 for k in _metanamemap.keys():
1553 if k in meta:
1552 if k in meta:
1554 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1553 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1555
1554
1556 content = b'%s%s\n%s' % (header, desc, body)
1555 content = b'%s%s\n%s' % (header, desc, body)
1557 write(content)
1556 write(content)
1558
1557
1559
1558
1560 @vcrcommand(
1559 @vcrcommand(
1561 b'phabread',
1560 b'phabread',
1562 [(b'', b'stack', False, _(b'read dependencies'))],
1561 [(b'', b'stack', False, _(b'read dependencies'))],
1563 _(b'DREVSPEC [OPTIONS]'),
1562 _(b'DREVSPEC [OPTIONS]'),
1564 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1563 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1565 )
1564 )
1566 def phabread(ui, repo, spec, **opts):
1565 def phabread(ui, repo, spec, **opts):
1567 """print patches from Phabricator suitable for importing
1566 """print patches from Phabricator suitable for importing
1568
1567
1569 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1568 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1570 the number ``123``. It could also have common operators like ``+``, ``-``,
1569 the number ``123``. It could also have common operators like ``+``, ``-``,
1571 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1570 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1572 select a stack.
1571 select a stack.
1573
1572
1574 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1573 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1575 could be used to filter patches by status. For performance reason, they
1574 could be used to filter patches by status. For performance reason, they
1576 only represent a subset of non-status selections and cannot be used alone.
1575 only represent a subset of non-status selections and cannot be used alone.
1577
1576
1578 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1577 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1579 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1578 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1580 stack up to D9.
1579 stack up to D9.
1581
1580
1582 If --stack is given, follow dependencies information and read all patches.
1581 If --stack is given, follow dependencies information and read all patches.
1583 It is equivalent to the ``:`` operator.
1582 It is equivalent to the ``:`` operator.
1584 """
1583 """
1585 opts = pycompat.byteskwargs(opts)
1584 opts = pycompat.byteskwargs(opts)
1586 if opts.get(b'stack'):
1585 if opts.get(b'stack'):
1587 spec = b':(%s)' % spec
1586 spec = b':(%s)' % spec
1588 drevs = querydrev(repo, spec)
1587 drevs = querydrev(repo, spec)
1589 readpatch(repo, drevs, ui.write)
1588 readpatch(repo, drevs, ui.write)
1590
1589
1591
1590
1592 @vcrcommand(
1591 @vcrcommand(
1593 b'phabupdate',
1592 b'phabupdate',
1594 [
1593 [
1595 (b'', b'accept', False, _(b'accept revisions')),
1594 (b'', b'accept', False, _(b'accept revisions')),
1596 (b'', b'reject', False, _(b'reject revisions')),
1595 (b'', b'reject', False, _(b'reject revisions')),
1597 (b'', b'abandon', False, _(b'abandon revisions')),
1596 (b'', b'abandon', False, _(b'abandon revisions')),
1598 (b'', b'reclaim', False, _(b'reclaim revisions')),
1597 (b'', b'reclaim', False, _(b'reclaim revisions')),
1599 (b'm', b'comment', b'', _(b'comment on the last revision')),
1598 (b'm', b'comment', b'', _(b'comment on the last revision')),
1600 ],
1599 ],
1601 _(b'DREVSPEC [OPTIONS]'),
1600 _(b'DREVSPEC [OPTIONS]'),
1602 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1601 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1603 )
1602 )
1604 def phabupdate(ui, repo, spec, **opts):
1603 def phabupdate(ui, repo, spec, **opts):
1605 """update Differential Revision in batch
1604 """update Differential Revision in batch
1606
1605
1607 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1606 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1608 """
1607 """
1609 opts = pycompat.byteskwargs(opts)
1608 opts = pycompat.byteskwargs(opts)
1610 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1609 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1611 if len(flags) > 1:
1610 if len(flags) > 1:
1612 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1611 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1613
1612
1614 actions = []
1613 actions = []
1615 for f in flags:
1614 for f in flags:
1616 actions.append({b'type': f, b'value': True})
1615 actions.append({b'type': f, b'value': True})
1617
1616
1618 drevs = querydrev(repo, spec)
1617 drevs = querydrev(repo, spec)
1619 for i, drev in enumerate(drevs):
1618 for i, drev in enumerate(drevs):
1620 if i + 1 == len(drevs) and opts.get(b'comment'):
1619 if i + 1 == len(drevs) and opts.get(b'comment'):
1621 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1620 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1622 if actions:
1621 if actions:
1623 params = {
1622 params = {
1624 b'objectIdentifier': drev[b'phid'],
1623 b'objectIdentifier': drev[b'phid'],
1625 b'transactions': actions,
1624 b'transactions': actions,
1626 }
1625 }
1627 callconduit(ui, b'differential.revision.edit', params)
1626 callconduit(ui, b'differential.revision.edit', params)
1628
1627
1629
1628
1630 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1629 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1631 def template_review(context, mapping):
1630 def template_review(context, mapping):
1632 """:phabreview: Object describing the review for this changeset.
1631 """:phabreview: Object describing the review for this changeset.
1633 Has attributes `url` and `id`.
1632 Has attributes `url` and `id`.
1634 """
1633 """
1635 ctx = context.resource(mapping, b'ctx')
1634 ctx = context.resource(mapping, b'ctx')
1636 m = _differentialrevisiondescre.search(ctx.description())
1635 m = _differentialrevisiondescre.search(ctx.description())
1637 if m:
1636 if m:
1638 return templateutil.hybriddict(
1637 return templateutil.hybriddict(
1639 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1638 {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
1640 )
1639 )
1641 else:
1640 else:
1642 tags = ctx.repo().nodetags(ctx.node())
1641 tags = ctx.repo().nodetags(ctx.node())
1643 for t in tags:
1642 for t in tags:
1644 if _differentialrevisiontagre.match(t):
1643 if _differentialrevisiontagre.match(t):
1645 url = ctx.repo().ui.config(b'phabricator', b'url')
1644 url = ctx.repo().ui.config(b'phabricator', b'url')
1646 if not url.endswith(b'/'):
1645 if not url.endswith(b'/'):
1647 url += b'/'
1646 url += b'/'
1648 url += t
1647 url += t
1649
1648
1650 return templateutil.hybriddict({b'url': url, b'id': t,})
1649 return templateutil.hybriddict({b'url': url, b'id': t,})
1651 return None
1650 return None
General Comments 0
You need to be logged in to leave comments. Login now