##// END OF EJS Templates
phabricator: add the uploadchunks function...
Ian Moody -
r43842:45307960 default
parent child Browse files
Show More
@@ -1,1390 +1,1419 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import base64
44 import contextlib
45 import contextlib
45 import itertools
46 import itertools
46 import json
47 import json
47 import operator
48 import operator
48 import re
49 import re
49
50
50 from mercurial.node import bin, nullid
51 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
52 from mercurial.i18n import _
52 from mercurial.pycompat import getattr
53 from mercurial.pycompat import getattr
53 from mercurial.thirdparty import attr
54 from mercurial.thirdparty import attr
54 from mercurial import (
55 from mercurial import (
55 cmdutil,
56 cmdutil,
56 context,
57 context,
57 encoding,
58 encoding,
58 error,
59 error,
59 exthelper,
60 exthelper,
60 httpconnection as httpconnectionmod,
61 httpconnection as httpconnectionmod,
61 match,
62 match,
62 mdiff,
63 mdiff,
63 obsutil,
64 obsutil,
64 parser,
65 parser,
65 patch,
66 patch,
66 phases,
67 phases,
67 pycompat,
68 pycompat,
68 scmutil,
69 scmutil,
69 smartset,
70 smartset,
70 tags,
71 tags,
71 templatefilters,
72 templatefilters,
72 templateutil,
73 templateutil,
73 url as urlmod,
74 url as urlmod,
74 util,
75 util,
75 )
76 )
76 from mercurial.utils import (
77 from mercurial.utils import (
77 procutil,
78 procutil,
78 stringutil,
79 stringutil,
79 )
80 )
80
81
81 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
82 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
82 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
83 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
83 # be specifying the version(s) of Mercurial they are tested with, or
84 # be specifying the version(s) of Mercurial they are tested with, or
84 # leave the attribute unspecified.
85 # leave the attribute unspecified.
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# All command/config registration goes through one extension helper.
eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)

# Labels used when rendering phabsend/phabread output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flag appended to every vcr-enabled command (see vcrcommand below).
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
136
137
137
138
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command whose HTTP traffic can be recorded/replayed via vcr.

    Behaves like ``command(name, flags, spec)`` but appends the hidden
    ``--test-vcr`` flag.  When that flag names a cassette file, all HTTP(S)
    traffic through ``urlmod`` is recorded to (or replayed from) it, with
    API tokens and cookies scrubbed first.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI and method agree and their form bodies
        # contain the same parameters, regardless of parameter order.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = r1.body.split(b'&')
        r2params = r2.body.split(b'&')
        return set(r1params) == set(r2params)

    def sanitiserequest(request):
        # Never record a real conduit API token in a cassette.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Cookies may carry session state; drop them from recordings.
        if r'set-cookie' in response[r'headers']:
            del response[r'headers'][r'set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr does dynamic imports that fight demandimport, so
                # disable it while the vcr machinery loads.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                r'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                r'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher(r'hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        # Preserve identity for help output and command registration.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(inner)

    return decorate
202
203
203
204
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        # PHP spells booleans in lowercase; translate before flattening.
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is not None:
            # Container: recurse into each entry, building "prefix[key]".
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)
        else:
            # Leaf value: record it under the accumulated key.
            flatparams[prefix] = obj

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
229
230
230
231
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    # Look up credentials for this URL in the [auth] section.
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
259
260
260
261
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    # Debug output happens before the token is added, so it is never logged.
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # User-configured curl: pipe the form body through stdin.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Builtin HTTP path.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    # json.loads returns unicode strings; convert them back to local bytes.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        json.loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
297
298
298
299
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        json.loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
322
323
323
324
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        # Cached (or developer-forced) value; skip the API round trip.
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if not query[b'data']:
        # No repository matched the callsign.
        return None
    repophid = query[b'data'][0][b'phid']
    # Cache in config so subsequent calls in this process avoid the lookup.
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
343
344
344
345
345 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
346 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
346 _differentialrevisiondescre = re.compile(
347 _differentialrevisiondescre = re.compile(
347 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
348 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
348 )
349 )
349
350
350
351
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if n in nodemap:
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        # NOTE(review): this continue only advances the tag
                        # loop, not the precursor/node loops — confirm that a
                        # later D-tag overriding an earlier one is intended.
                        continue

        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            toconfirm[node] = (1, set(precnodes), int(m.group(r'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                # Remove the stale local tag by re-tagging it to nullid.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%s: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
441
442
442
443
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # diffui yields (chunk, label) pairs; only the text chunks matter here.
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
451
452
452
453
class DiffChangeType(object):
    """Constants mirroring Differential's per-file change-type codes."""

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
462
463
463
464
class DiffFileType(object):
    """Constants mirroring Differential's file-content-type codes."""

    TEXT = 1
    IMAGE = 2
    BINARY = 3
468
469
469
470
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Attribute names are dictated by the Conduit wire format, hence the
    # camelCase despite local naming conventions.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
483
484
484
485
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # Field names follow the Conduit wire format (camelCase).
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every 'new:' metadata entry under its 'old:' key."""
        # Snapshot the keys: we mutate the dict while iterating.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the pre-change unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the post-change unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(hunk)
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
524
525
525
526
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Field names are sent to Phabricator as-is, so they keep Phabricator's
    camelCase naming (marked "camelcase-required" below, presumably for the
    project's naming checks -- confirm against the check-code rules).
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    # 40 zeroes, i.e. the null revision in hex
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: phabchange} -- populated via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by the path it touches."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = change
550
551
551
552
def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A huge context makes the whole file appear as one hunk.
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pf, _f, header, hunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, matcher, opts=diffopts)
    )

    for hunkranges, hunklines in hunks:
        oldOffset, oldLength, newOffset, newLength = hunkranges
        # Drop the "@@ ..." range line; the offsets carry that information.
        corpus = b''.join(hunklines[1:])

        # Feed header plus hunk lines through diffstat to get the +/- counts.
        statlines = list(header) + list(hunklines)
        _files, _merges, added, removed, _binary = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                added,
                removed,
            )
        )
580
581
581
582
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fctx`` provides the file content; ``fphid`` is the PHID of the
    already-allocated Phabricator file the chunks belong to. Chunks marked
    complete by the server (e.g. on a resumed upload) are skipped.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    progress = ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    )
    # Read the (potentially large) file content once, instead of calling
    # fctx.data() again for every chunk just to take a slice of it.
    data = fctx.data()
    for chunk in chunks:
        progress.increment()
        if chunk[b'complete']:
            continue
        bstart = int(chunk[b'byteStart'])
        bend = int(chunk[b'byteEnd'])
        callconduit(
            ui,
            b'file.uploadchunk',
            {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(data[bstart:bend]),
                b'dataEncoding': b'base64',
            },
        )
    progress.complete()
609
610
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # A huge context makes the raw diff self-contained; the
    # "differential.createrawdiff" API turns it into a Diff object.
    diffopts = mdiff.diffopts(git=True, context=32767)
    params = {b'diff': getdiff(ctx, diffopts)}
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo.ui, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
594
623
595
624
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    ui = ctx.repo().ui
    hgmeta = {
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'branch': ctx.branch(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    }
    localcommits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        },
    }
    # Set both diff properties with separate conduit calls, hg:meta first.
    for name, data in [
        (b'hg:meta', hgmeta),
        (b'local:commits', localcommits),
    ]:
        params = {
            b'diff_id': diff[b'id'],
            b'name': name,
            b'data': templatefilters.json(data),
        }
        callconduit(ui, b'differential.setdiffproperty', params)
630
659
631
660
def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair: the edited Revision as returned by
    "differential.revision.edit" and the Diff (new or ``olddiff``) attached
    to it.
    """
    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs so we only upload when content changed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    # Only forward the fields Phabricator edits through transactions here.
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
701
730
702
731
def userphids(repo, names):
    """convert user names to PHIDs

    Aborts if any of the given names cannot be resolved, because the
    "user.search" endpoint silently omits unknown usernames instead of
    reporting an error.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(repo.ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
718
747
719
748
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # Confirmation can come from either config or the command line flag.
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Resolve reviewers/blockers up front; the same transaction list is
    # applied to every Revision in the stack.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo, reviewers))
    if blockers:
        phids.extend(
            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group(r'id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        # One status line per changeset: "Dxx - action - node: summary"
        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Rewritten parents (from earlier iterations) take
                    # precedence over the original ones.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%s\n' % drevid
                        )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
927
956
928
957
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): the trailing space looks intentional (header
        # alignment as in "hg export") -- confirm before "fixing" it.
        (b'parent', b'Parent '),
    ]
)
940
969
941
970
def _confirmbeforesend(repo, revs, oldmap):
    """List the changesets about to be sent and prompt; return True to send."""
    url, _token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        summary = ctx.description().splitlines()[0]
        _oldnode, _olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Known Revisions show their Dxx id, everything else shows NEW.
        if drevid:
            drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(summary, b'phabricator.desc'),
            )
        )

    # promptchoice returns 0 for "Yes" (the first choice).
    return not ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    )
969
998
970
999
# Differential Revision status names in the normalized form produced by
# _getstatusname() (lowercase, no spaces).
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
}
978
1007
979
1008
980 def _getstatusname(drev):
1009 def _getstatusname(drev):
981 """get normalized status name from a Differential Revision"""
1010 """get normalized status name from a Differential Revision"""
982 return drev[b'statusName'].replace(b' ', b'').lower()
1011 return drev[b'statusName'].replace(b' ', b'').lower()
983
1012
984
1013
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.

# Element table consumed by parser.parser() in _parse():
_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
999
1028
1000
1029
def _tokenize(text):
    """Tokenize a DREV spec bytestring into (token-type, value, pos) tuples.

    Yields (b'symbol', name, pos) for runs of non-special bytes, a
    (char, None, pos) token for each special character, and a final
    (b'end', None, pos) marker. Spaces separate tokens but are not emitted.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice rather than index: on Python 3, text[pos] is an int, so
            # "text[pos] != b' '" would always be true and the yielded token
            # type would be an int that can never match _elements' bytes keys.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1020
1049
1021
1050
def _parse(text):
    """Parse a DREV spec into an AST, rejecting trailing garbage."""
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos == len(text):
        return tree
    # The parser stopped early: whatever follows is not a valid token.
    raise error.ParseError(b'invalid token', pos)
1027
1056
1028
1057
1029 def _parsedrev(symbol):
1058 def _parsedrev(symbol):
1030 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1059 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1031 if symbol.startswith(b'D') and symbol[1:].isdigit():
1060 if symbol.startswith(b'D') and symbol[1:].isdigit():
1032 return int(symbol[1:])
1061 return int(symbol[1:])
1033 if symbol.isdigit():
1062 if symbol.isdigit():
1034 return int(symbol)
1063 return int(symbol)
1035
1064
1036
1065
1037 def _prefetchdrevs(tree):
1066 def _prefetchdrevs(tree):
1038 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1067 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1039 drevs = set()
1068 drevs = set()
1040 ancestordrevs = set()
1069 ancestordrevs = set()
1041 op = tree[0]
1070 op = tree[0]
1042 if op == b'symbol':
1071 if op == b'symbol':
1043 r = _parsedrev(tree[1])
1072 r = _parsedrev(tree[1])
1044 if r:
1073 if r:
1045 drevs.add(r)
1074 drevs.add(r)
1046 elif op == b'ancestors':
1075 elif op == b'ancestors':
1047 r, a = _prefetchdrevs(tree[1])
1076 r, a = _prefetchdrevs(tree[1])
1048 drevs.update(r)
1077 drevs.update(r)
1049 ancestordrevs.update(r)
1078 ancestordrevs.update(r)
1050 ancestordrevs.update(a)
1079 ancestordrevs.update(a)
1051 else:
1080 else:
1052 for t in tree[1:]:
1081 for t in tree[1:]:
1053 r, a = _prefetchdrevs(t)
1082 r, a = _prefetchdrevs(t)
1054 drevs.update(r)
1083 drevs.update(r)
1055 ancestordrevs.update(a)
1084 ancestordrevs.update(a)
1056 return drevs, ancestordrevs
1085 return drevs, ancestordrevs
1057
1086
1058
1087
def querydrev(repo, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "id": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "title": "example",
            "uri": "https://phab.example.com/D2",
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "status": "0",
            "statusName": "Needs Review",
            "properties": [],
            "branch": null,
            "summary": "",
            "testPlan": "",
            "lineCount": "2",
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "diffs": [
              "3",
              "4",
            ],
            "commits": [],
            "reviewers": [],
            "ccs": [],
            "hashes": [],
            "auxiliary": {
              "phabricator:projects": [],
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
            },
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "sourcePath": null
        }
    """

    def fetch(params):
        """params -> single drev or None

        Consults the ``prefetched`` cache first; on a miss, issues a
        differential.query call and caches every returned drev under
        both its numeric id and its PHID.
        """
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(repo.ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Depth-first traversal of the phabricator:depends-on links,
        deduplicated via ``visited``; the result is reversed so the
        bottom of the stack comes first.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                # dependencies are referenced by PHID, not numeric id
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache, shared by fetch()/getstack()/walk()
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = repo.ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch.  For each ancestor query,
    # speculatively fetch a window of ``batchsize`` preceding ids so the
    # stack walk below mostly hits the cache.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # status names filter within the prefetched valid ids only
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # op names match operator.and_/add/sub, which implement
            # set intersection/union/difference on smartsets
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1181
1210
1182
1211
def getdescfromdrev(drev):
    """Build a commit message from a "Differential Revision" dict.

    Similar to the differential.getcommitmessage API, but limited to the
    fields we care about: title, summary, test plan and URL.  Empty
    sections are dropped; the rest are joined by blank lines.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]
    plan = drev[b'testPlan'].rstrip()
    parts.append(b'Test Plan:\n%s' % plan if plan else b'')
    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(p for p in parts if p)
1196
1225
1197
1226
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "time": 1499546314,
              "branch": "default",
              "tag": "",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "summary": "...",
              "message": "...",
              "authorEmail": "foo@example.com"
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            # Pick one commit deterministically.  Sort the (node, commit)
            # items by node rather than sorting the commit dicts themselves:
            # dicts are unorderable on Python 3, so sorting .values() would
            # raise TypeError whenever more than one commit is present.
            commit = sorted(props[b'local:commits'].items())[0][1]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # local:commits carries a bare epoch; time zone is lost,
                # so record a zero UTC offset
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
1264
1293
1265
1294
def readpatch(repo, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write is usually ui.write. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta properties for the latest diff of every drev at once.
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})

    # Emit one patch per drev.
    for drev in drevs:
        repo.ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(
            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Preserve metadata from hg:meta as hg patch headers that the
        # "import" command understands; see patchheadermap and extract in
        # mercurial/patch.py for the supported headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for key, headername in _metanamemap.items():
            if key in meta:
                headerlines.append(b'# %s %s\n' % (headername, meta[key]))

        write(b'%s%s\n%s' % (b''.join(headerlines), desc, body))
1297
1326
1298
1327
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        # --stack is sugar for the ancestors operator of the query language
        spec = b':(%s)' % spec
    drevs = querydrev(repo, spec)
    readpatch(repo, drevs, ui.write)
1329
1358
1330
1359
@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revision in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    # The state-changing flags are mutually exclusive: at most one may be set.
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': b'true'})

    drevs = querydrev(repo, spec)
    for i, drev in enumerate(drevs):
        # -m/--comment applies only to the last revision of the selection
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
1367
1396
1368
1397
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        # The commit message already carries the canonical review URL.
        return templateutil.hybriddict(
            {b'url': match.group(r'url'), b'id': b"D%s" % match.group(r'id'),}
        )
    # Otherwise fall back to a local D* tag, composing the URL from the
    # configured phabricator base URL.
    for tag in ctx.repo().nodetags(ctx.node()):
        if _differentialrevisiontagre.match(tag):
            base = ctx.repo().ui.config(b'phabricator', b'url')
            if not base.endswith(b'/'):
                base += b'/'
            return templateutil.hybriddict({b'url': base + tag, b'id': tag,})
    return None
General Comments 0
You need to be logged in to leave comments. Login now