##// END OF EJS Templates
phabricator: extract the logic to amend diff properties to a function...
Matt Harbison -
r45137:99fa161a default
parent child Browse files
Show More
@@ -1,2055 +1,2062 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid
57 from mercurial.node import bin, nullid
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
99
99
100 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
101
101
102 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
103 command = eh.command
103 command = eh.command
104 configtable = eh.configtable
104 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
107
107
108 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
109 eh.configitem(
109 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
111 )
111 )
112 eh.configitem(
112 eh.configitem(
113 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
114 )
114 )
115 eh.configitem(
115 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
117 )
117 )
118 # developer config: phabricator.repophid
118 # developer config: phabricator.repophid
119 eh.configitem(
119 eh.configitem(
120 b'phabricator', b'repophid', default=None,
120 b'phabricator', b'repophid', default=None,
121 )
121 )
122 eh.configitem(
122 eh.configitem(
123 b'phabricator', b'url', default=None,
123 b'phabricator', b'url', default=None,
124 )
124 )
125 eh.configitem(
125 eh.configitem(
126 b'phabsend', b'confirm', default=False,
126 b'phabsend', b'confirm', default=False,
127 )
127 )
128 eh.configitem(
128 eh.configitem(
129 b'phabimport', b'secret', default=False,
129 b'phabimport', b'secret', default=False,
130 )
130 )
131 eh.configitem(
131 eh.configitem(
132 b'phabimport', b'obsolete', default=False,
132 b'phabimport', b'obsolete', default=False,
133 )
133 )
134
134
135 colortable = {
135 colortable = {
136 b'phabricator.action.created': b'green',
136 b'phabricator.action.created': b'green',
137 b'phabricator.action.skipped': b'magenta',
137 b'phabricator.action.skipped': b'magenta',
138 b'phabricator.action.updated': b'magenta',
138 b'phabricator.action.updated': b'magenta',
139 b'phabricator.desc': b'',
139 b'phabricator.desc': b'',
140 b'phabricator.drev': b'bold',
140 b'phabricator.drev': b'bold',
141 b'phabricator.node': b'',
141 b'phabricator.node': b'',
142 b'phabricator.status.abandoned': b'magenta dim',
142 b'phabricator.status.abandoned': b'magenta dim',
143 b'phabricator.status.accepted': b'green bold',
143 b'phabricator.status.accepted': b'green bold',
144 b'phabricator.status.closed': b'green',
144 b'phabricator.status.closed': b'green',
145 b'phabricator.status.needsreview': b'yellow',
145 b'phabricator.status.needsreview': b'yellow',
146 b'phabricator.status.needsrevision': b'red',
146 b'phabricator.status.needsrevision': b'red',
147 b'phabricator.status.changesplanned': b'red',
147 b'phabricator.status.changesplanned': b'red',
148 }
148 }
149
149
150 _VCR_FLAGS = [
150 _VCR_FLAGS = [
151 (
151 (
152 b'',
152 b'',
153 b'test-vcr',
153 b'test-vcr',
154 b'',
154 b'',
155 _(
155 _(
156 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
156 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
157 b', otherwise will mock all http requests using the specified vcr file.'
157 b', otherwise will mock all http requests using the specified vcr file.'
158 b' (ADVANCED)'
158 b' (ADVANCED)'
159 ),
159 ),
160 ),
160 ),
161 ]
161 ]
162
162
163
163
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Returns True (in addition to whatever the wrapped loader reports) when a
    valid ``.arcconfig`` was found and applied.
    """
    loaded = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))

        def tolocal(x):
            # json.loads only returns unicode strings
            return (
                encoding.unitolocal(x)
                if isinstance(x, pycompat.unicode)
                else x
            )

        arcconfig = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
        loaded = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig present; nothing to load
        pass

    # Translate .arcconfig keys into [phabricator] config entries.
    cfg = util.sortdict()
    for arckey, cfgkey in [
        (b"repository.callsign", (b"phabricator", b"callsign")),
        (b"phabricator.uri", (b"phabricator", b"url")),
    ]:
        if arckey in arcconfig:
            cfg[cfgkey] = arcconfig[arckey]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    # Load .hg/hgrc
    return orig(ui, wdirvfs, hgvfs, requirements) or loaded
200
200
201
201
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """Register a command like ``command``, adding a hidden ``--test-vcr`` flag.

    When ``--test-vcr PATH`` is supplied, HTTP traffic made through
    ``urlmod`` is recorded to (or, if PATH exists, replayed from) a vcr
    cassette file, letting the test suite exercise the Phabricator commands
    without a live server.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Two requests match when URI, method, and decoded body parameters
        # agree.  JSON-valued parameters are compared structurally so that
        # key-ordering differences do not break cassette replay.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub API tokens ("cli-...") before they land in a recorded fixture.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop cookies so session state does not leak into cassettes.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            # ``test_vcr`` is consumed here and never reaches ``fn``.
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                # vcr performs dynamic imports that do not play well with
                # Mercurial's demandimport, so disable it while loading.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch the connection classes used by urlmod so all
                        # extension HTTP traffic flows through vcr.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command normally.
            return fn(*args, **kwargs)

        # depth=2 skips the decorator frames when checking the signature.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
280
280
281
281
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def flatten(prefix, obj):
        # PHP-style forms encode booleans as the strings 'true'/'false'.
        if isinstance(obj, bool):
            obj = b'true' if obj else b'false'
        # Dispatch on the exact type (subclasses are treated as leaves,
        # matching the original behavior).
        kind = type(obj)
        if kind is list:
            children = [(b'%d' % i, v) for i, v in enumerate(obj)]
        elif kind is dict:
            children = list(obj.items())
        else:
            # Leaf value: record it under the accumulated key.
            flatparams[prefix] = obj
            return
        for key, value in children:
            if prefix:
                flatten(b'%s[%s]' % (prefix, key), value)
            else:
                flatten(key, value)

    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
307
307
308
308
def readurltoken(ui):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        groupname, authconfig = res
        ui.debug(b"using auth.%s.* for authentication\n" % groupname)
        token = authconfig.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
337
337
338
338
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    # Inject the auth token without mutating the caller's dict.
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    data = urlencodenested(
        {
            b'params': templatefilters.json(params),
            b'output': b'json',
            b'__conduit__': 1,
        }
    )
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Pipe the payload through the user-configured curl command.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        # Default: use the builtin HTTP machinery.
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)

    def tolocal(x):
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    parsed = pycompat.rapply(
        tolocal,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        raise error.Abort(
            _(b'Conduit Error (%s): %s')
            % (parsed[b'error_code'], parsed[b'error_info'])
        )
    return parsed[b'result']
382
382
383
383
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
407
407
408
408
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    matches = query[b'data']
    if not matches:
        return None
    repophid = matches[0][b'phid']
    # Cache the answer so later calls skip the round trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
428
428
429
429
# Local tag names like "D123" that associate a node with a Differential
# Revision id.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# The "Differential Revision: <url>" line embedded in commit messages; the
# trailing number of the URL ("id" group) is the Differential Revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
434
434
435
435
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag still has to be confirmed against
                        # Phabricator's view of the Differential below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # for/else: no predecessor carried a D* tag.
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message explicitly names the revision.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            # All diffs belonging to this node's Differential Revision.
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                # Re-tagging as nullid removes the stale local tag (see the
                # warning text below).
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                # Highest diff id == most recently submitted diff.
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
547
547
548
548
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevid(ctx):
        # The commit message is the authoritative place for the
        # "Differential Revision: .../D123" marker.
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            return int(descmatch.group('id'))
        # Fall back to local tags of the form ``D123``.
        for tag in repo.nodetags(ctx.node()):
            tagmatch = _differentialrevisiontagre.match(tag)
            if tagmatch:
                return int(tagmatch.group(1))
        return None

    return {rev: _drevid(repo[rev]) for rev in revs}
570
570
571
571
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    repo = ctx.repo()
    chunks = patch.diffui(
        repo, basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    # Concatenate the labeled output chunks, discarding the labels.
    return b''.join(chunk for chunk, _label in chunks)
580
580
581
581
class DiffChangeType(object):
    """Integer codes Phabricator uses for the kind of change to one file.

    These values are defined by Phabricator's Differential API and must not
    be altered.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
591
591
592
592
class DiffFileType(object):
    """Integer codes Phabricator uses for the content type of a file.

    These values are defined by Phabricator's Differential API and must not
    be altered.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
597
597
598
598
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    # Hunk coordinates, as found in a unified diff's ``@@`` header.  The
    # camelCase attribute names are required by Phabricator's API, so they
    # must not be renamed, and the declaration order is part of the
    # positional-constructor interface.
    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body: the diff text without the ``@@`` header line.
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
612
612
613
613
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    # Attribute names marked camelcase-required are dictated by Phabricator's
    # API; the declaration order is part of the positional-constructor
    # interface and must not change.
    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    # Destination paths when this file was moved/copied away (see addadded()).
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    # A DiffChangeType value; defaults to a plain modification.
    type = attr.ib(default=DiffChangeType.CHANGE)
    # A DiffFileType value; defaults to text.
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key under ``old:``."""
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the previous unix file mode (e.g. b'100644')."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the new unix file mode (e.g. b'100755')."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk and fold its line counts into this change."""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
653
653
654
654
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Attribute names marked camelcase-required are dictated by Phabricator's
    # API and must not be renamed.
    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    # Defaults to the null revision; overwritten with the real base hex.
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # Maps currentPath -> serialized phabchange dict (see addchange()).
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its current path."""
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
681
681
682
682
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    matcher = match.exact([fname])
    # A huge context value makes the whole file show up in the hunks.
    opts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, matcher, opts=opts)
    )

    for ranges, lines in fhunks:
        oldOffset, oldLength, newOffset, newLength = ranges
        # Drop the "@@ ..." line; Phabricator carries the offsets separately.
        corpus = b''.join(lines[1:])
        # Recompute add/del counts over a synthetic single-file diff made of
        # the git header plus this hunk.
        statlines = list(header)
        statlines += lines
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(statlines))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
711
711
712
712
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # The server reports chunks it already holds; skip those.
            if chunk[b'complete']:
                continue
            bstart, bend = int(chunk[b'byteStart']), int(chunk[b'byteEnd'])
            params = {
                b'filePHID': fphid,
                b'byteStart': bstart,
                b'data': base64.b64encode(fctx.data()[bstart:bend]),
                b'dataEncoding': b'base64',
            }
            callconduit(ui, b'file.uploadchunk', params)
738
738
739
739
def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    filealloc = callconduit(
        ui,
        b'file.allocate',
        {
            b'name': fname,
            b'contentLength': fctx.size(),
            b'contentHash': pycompat.bytestr(
                hashlib.sha256(fctx.data()).hexdigest()
            ),
        },
    )
    fphid = filealloc[b'filePHID']

    if not filealloc[b'upload']:
        ui.debug(b'server already has %s\n' % bytes(fctx))
    else:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if fphid:
            # A PHID from the allocation means the server wants chunks.
            uploadchunks(fctx, fphid)
        else:
            fphid = callconduit(
                ui,
                b'file.upload',
                {
                    b'name': fname,
                    b'data_base64': base64.b64encode(fctx.data()),
                },
            )

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
775
775
776
776
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if fctx and not fctx.cmp(oldfctx):
        # Contents are identical: no need to upload the old version.
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
        return

    # Files differ (or the file is being removed), so record and upload the
    # old version.
    pchange.metadata[b'old:file:size'] = oldfctx.size()
    mimeguess, _enc = mimetypes.guess_type(
        encoding.unifromlocal(oldfctx.path())
    )
    if mimeguess:
        pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(mimeguess)
    pchange.metadata[b'old:binary-phid'] = uploadfile(oldfctx)
800
800
801
801
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if not mimeguess:
        return
    mimeguess = pycompat.bytestr(mimeguess)
    pchange.metadata[b'new:file:mime-type'] = mimeguess
    # Let the web UI render images inline rather than as opaque binaries.
    if mimeguess.startswith(b'image/'):
        pchange.fileType = DiffFileType.IMAGE
814
814
815
815
# Copied from mercurial/patch.py
# Map hg file flags ('l' symlink, 'x' executable, '' regular) to the octal
# mode strings used in git-style diffs.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
818
818
819
819
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn so the user knows why the file won't get text hunks.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
833
833
834
834
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    parentctx = basectx.p1()
    for fname in removed:
        oldfctx = parentctx[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary and non-UTF-8 files get no text hunks; the deletion alone
        # is recorded.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
847
847
848
848
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    parentctx = basectx.p1()
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = parentctx[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        newmode = gitmode[fctx.flags()]
        oldmode = gitmode[oldfctx.flags()]
        if newmode != oldmode:
            pchange.addoldmode(oldmode)
            pchange.addnewmode(newmode)

        # Either side being binary or non-UTF-8 forces the binary path.
        # Evaluation order matters: notutf8() emits a warning per call.
        isbinary = (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        )
        if isbinary:
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
873
873
874
874
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # Folding a range: copy info must be computed against the range base.
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine the source path: either from the precomputed range-wide
        # copy map, or from the single commit's own rename metadata.
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared too: it's a move.  Record the MOVE_AWAY
                # side and take the source out of ``removed`` so addremoved()
                # doesn't report it again.
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # Second destination for an already-moved source: promote the
                # source change to MULTICOPY.
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            # ``originalmode`` is only defined on this branch, where the old
            # file exists to compare against.
            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # Flush the AWAY-side changes collected above.
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
956
956
957
957
def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if diff:
        return diff
    if basectx != ctx:
        raise error.Abort(_(b'cannot create diff for %s::%s') % (basectx, ctx))
    raise error.Abort(_(b'cannot create diff for %s') % ctx)
987
987
988
988
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]
    ui = basectx.repo().ui

    def setprop(name, data):
        # Attach one named property blob to the diff on the server.
        callconduit(
            ui,
            b'differential.setdiffproperty',
            {b'diff_id': diffid, b'name': name, b'data': data},
        )

    # hg:meta describes the folded range as a whole: tip commit's metadata
    # plus the base's parent.
    setprop(
        b'hg:meta',
        templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    )

    # local:commits records each commit in the range individually.
    commits = {
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
        for ctx in ctxs
    }
    setprop(b'local:commits', templatefilters.json(commits))
1032
1032
1033
1033
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair, where ``revision`` is the conduit
    response for ``differential.revision.edit`` and ``diff`` is either the
    newly created diff or ``olddiff`` when the patch content was unchanged.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Huge context so the diff comparison sees whole-file content, not
        # just the changed hunks.
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1152
1152
1153
1153
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # The API silently drops unknown usernames instead of reporting an
    # error, so detect any name that did not resolve and abort explicitly.
    entries = result[b'data']
    found = set()
    for entry in entries:
        found.add(entry[b'fields'][b'username'].lower())
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
1169
1169
1170
1170
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    ctxs = []
    for node in newnodes:
        ctxs.append(unfi[node])
    try:
        writediffproperties(ctxs, diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1183
1184
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    # --confirm can come from either the command line or configuration
    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    # Build the reviewer transactions once; they are attached to every
    # revision sent below.
    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode = oldnode
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = get_amended_desc(drev, old, False)
                # Make sure commit message contain "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    # Use the already-rewritten parent when the parent was
                    # amended earlier in this loop, otherwise the original.
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )
                # Remove local tags since it's no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
1386
1393
1387
1394
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)
1399
1406
1400
1407
def _confirmbeforesend(repo, revs, oldmap):
    """show the changesets about to be sent and prompt for confirmation

    Returns True when the user accepts, False when they decline.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        # Known revisions show their D<id>; fresh ones show "NEW".
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(firstline, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    # promptchoice returns 0 for the first choice (Yes)
    return not ui.promptchoice(prompt)
1428
1435
1429
1436
# Normalized (lowercase, space-stripped) Differential status names, matching
# the output of ``_getstatusname``. NOTE(review): presumably used to validate
# status filters elsewhere in this extension — confirm against callers.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
1438
1445
1439
1446
1440 def _getstatusname(drev):
1447 def _getstatusname(drev):
1441 """get normalized status name from a Differential Revision"""
1448 """get normalized status name from a Differential Revision"""
1442 return drev[b'statusName'].replace(b' ', b'').lower()
1449 return drev[b'statusName'].replace(b' ', b'').lower()
1443
1450
1444
1451
# Small language to specify differential revisions. Support symbols: (), :X,
# +, and -.
# These elements drive the Pratt parser constructed in ``_parse`` via
# ``parser.parser(_elements)``.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1459
1466
1460
1467
def _tokenize(text):
    """tokenize ``text`` into a stream of (token-type, value, position)

    Symbols are maximal runs of non-special bytes; each special character
    (except space, which is skipped) is emitted as its own token.  A final
    (b'end', None, pos) token terminates the stream.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Collect the longest run of non-special bytes starting at pos.
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
1480
1487
1481
1488
def _parse(text):
    """parse a revision spec into a tree, aborting on trailing garbage"""
    specparser = parser.parser(_elements)
    tree, pos = specparser.parse(_tokenize(text))
    if pos == len(text):
        return tree
    raise error.ParseError(b'invalid token', pos)
1487
1494
1488
1495
1489 def _parsedrev(symbol):
1496 def _parsedrev(symbol):
1490 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1497 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1491 if symbol.startswith(b'D') and symbol[1:].isdigit():
1498 if symbol.startswith(b'D') and symbol[1:].isdigit():
1492 return int(symbol[1:])
1499 return int(symbol[1:])
1493 if symbol.isdigit():
1500 if symbol.isdigit():
1494 return int(symbol)
1501 return int(symbol)
1495
1502
1496
1503
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        if drev:
            drevs.add(drev)
    elif op == b'ancestors':
        # An ancestors query needs the revision itself plus everything
        # below it, so its ids land in both sets.
        sub, subanc = _prefetchdrevs(tree[1])
        drevs |= sub
        ancestordrevs |= sub
        ancestordrevs |= subanc
    else:
        # Binary/group operators: merge the results of every operand.
        for subtree in tree[1:]:
            sub, subanc = _prefetchdrevs(subtree)
            drevs |= sub
            ancestordrevs |= subanc
    return drevs, ancestordrevs
1517
1524
1518
1525
1519 def querydrev(ui, spec):
1526 def querydrev(ui, spec):
1520 """return a list of "Differential Revision" dicts
1527 """return a list of "Differential Revision" dicts
1521
1528
1522 spec is a string using a simple query language, see docstring in phabread
1529 spec is a string using a simple query language, see docstring in phabread
1523 for details.
1530 for details.
1524
1531
1525 A "Differential Revision dict" looks like:
1532 A "Differential Revision dict" looks like:
1526
1533
1527 {
1534 {
1528 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1535 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1529 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1536 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1530 "auxiliary": {
1537 "auxiliary": {
1531 "phabricator:depends-on": [
1538 "phabricator:depends-on": [
1532 "PHID-DREV-gbapp366kutjebt7agcd"
1539 "PHID-DREV-gbapp366kutjebt7agcd"
1533 ]
1540 ]
1534 "phabricator:projects": [],
1541 "phabricator:projects": [],
1535 },
1542 },
1536 "branch": "default",
1543 "branch": "default",
1537 "ccs": [],
1544 "ccs": [],
1538 "commits": [],
1545 "commits": [],
1539 "dateCreated": "1499181406",
1546 "dateCreated": "1499181406",
1540 "dateModified": "1499182103",
1547 "dateModified": "1499182103",
1541 "diffs": [
1548 "diffs": [
1542 "3",
1549 "3",
1543 "4",
1550 "4",
1544 ],
1551 ],
1545 "hashes": [],
1552 "hashes": [],
1546 "id": "2",
1553 "id": "2",
1547 "lineCount": "2",
1554 "lineCount": "2",
1548 "phid": "PHID-DREV-672qvysjcczopag46qty",
1555 "phid": "PHID-DREV-672qvysjcczopag46qty",
1549 "properties": {},
1556 "properties": {},
1550 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1557 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1551 "reviewers": [],
1558 "reviewers": [],
1552 "sourcePath": null
1559 "sourcePath": null
1553 "status": "0",
1560 "status": "0",
1554 "statusName": "Needs Review",
1561 "statusName": "Needs Review",
1555 "summary": "",
1562 "summary": "",
1556 "testPlan": "",
1563 "testPlan": "",
1557 "title": "example",
1564 "title": "example",
1558 "uri": "https://phab.example.com/D2",
1565 "uri": "https://phab.example.com/D2",
1559 }
1566 }
1560 """
1567 """
1561 # TODO: replace differential.query and differential.querydiffs with
1568 # TODO: replace differential.query and differential.querydiffs with
1562 # differential.diff.search because the former (and their output) are
1569 # differential.diff.search because the former (and their output) are
1563 # frozen, and planned to be deprecated and removed.
1570 # frozen, and planned to be deprecated and removed.
1564
1571
1565 def fetch(params):
1572 def fetch(params):
1566 """params -> single drev or None"""
1573 """params -> single drev or None"""
1567 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1574 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1568 if key in prefetched:
1575 if key in prefetched:
1569 return prefetched[key]
1576 return prefetched[key]
1570 drevs = callconduit(ui, b'differential.query', params)
1577 drevs = callconduit(ui, b'differential.query', params)
1571 # Fill prefetched with the result
1578 # Fill prefetched with the result
1572 for drev in drevs:
1579 for drev in drevs:
1573 prefetched[drev[b'phid']] = drev
1580 prefetched[drev[b'phid']] = drev
1574 prefetched[int(drev[b'id'])] = drev
1581 prefetched[int(drev[b'id'])] = drev
1575 if key not in prefetched:
1582 if key not in prefetched:
1576 raise error.Abort(
1583 raise error.Abort(
1577 _(b'cannot get Differential Revision %r') % params
1584 _(b'cannot get Differential Revision %r') % params
1578 )
1585 )
1579 return prefetched[key]
1586 return prefetched[key]
1580
1587
1581 def getstack(topdrevids):
1588 def getstack(topdrevids):
1582 """given a top, get a stack from the bottom, [id] -> [id]"""
1589 """given a top, get a stack from the bottom, [id] -> [id]"""
1583 visited = set()
1590 visited = set()
1584 result = []
1591 result = []
1585 queue = [{b'ids': [i]} for i in topdrevids]
1592 queue = [{b'ids': [i]} for i in topdrevids]
1586 while queue:
1593 while queue:
1587 params = queue.pop()
1594 params = queue.pop()
1588 drev = fetch(params)
1595 drev = fetch(params)
1589 if drev[b'id'] in visited:
1596 if drev[b'id'] in visited:
1590 continue
1597 continue
1591 visited.add(drev[b'id'])
1598 visited.add(drev[b'id'])
1592 result.append(int(drev[b'id']))
1599 result.append(int(drev[b'id']))
1593 auxiliary = drev.get(b'auxiliary', {})
1600 auxiliary = drev.get(b'auxiliary', {})
1594 depends = auxiliary.get(b'phabricator:depends-on', [])
1601 depends = auxiliary.get(b'phabricator:depends-on', [])
1595 for phid in depends:
1602 for phid in depends:
1596 queue.append({b'phids': [phid]})
1603 queue.append({b'phids': [phid]})
1597 result.reverse()
1604 result.reverse()
1598 return smartset.baseset(result)
1605 return smartset.baseset(result)
1599
1606
1600 # Initialize prefetch cache
1607 # Initialize prefetch cache
1601 prefetched = {} # {id or phid: drev}
1608 prefetched = {} # {id or phid: drev}
1602
1609
1603 tree = _parse(spec)
1610 tree = _parse(spec)
1604 drevs, ancestordrevs = _prefetchdrevs(tree)
1611 drevs, ancestordrevs = _prefetchdrevs(tree)
1605
1612
1606 # developer config: phabricator.batchsize
1613 # developer config: phabricator.batchsize
1607 batchsize = ui.configint(b'phabricator', b'batchsize')
1614 batchsize = ui.configint(b'phabricator', b'batchsize')
1608
1615
1609 # Prefetch Differential Revisions in batch
1616 # Prefetch Differential Revisions in batch
1610 tofetch = set(drevs)
1617 tofetch = set(drevs)
1611 for r in ancestordrevs:
1618 for r in ancestordrevs:
1612 tofetch.update(range(max(1, r - batchsize), r + 1))
1619 tofetch.update(range(max(1, r - batchsize), r + 1))
1613 if drevs:
1620 if drevs:
1614 fetch({b'ids': list(tofetch)})
1621 fetch({b'ids': list(tofetch)})
1615 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1622 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1616
1623
1617 # Walk through the tree, return smartsets
1624 # Walk through the tree, return smartsets
1618 def walk(tree):
1625 def walk(tree):
1619 op = tree[0]
1626 op = tree[0]
1620 if op == b'symbol':
1627 if op == b'symbol':
1621 drev = _parsedrev(tree[1])
1628 drev = _parsedrev(tree[1])
1622 if drev:
1629 if drev:
1623 return smartset.baseset([drev])
1630 return smartset.baseset([drev])
1624 elif tree[1] in _knownstatusnames:
1631 elif tree[1] in _knownstatusnames:
1625 drevs = [
1632 drevs = [
1626 r
1633 r
1627 for r in validids
1634 for r in validids
1628 if _getstatusname(prefetched[r]) == tree[1]
1635 if _getstatusname(prefetched[r]) == tree[1]
1629 ]
1636 ]
1630 return smartset.baseset(drevs)
1637 return smartset.baseset(drevs)
1631 else:
1638 else:
1632 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1639 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1633 elif op in {b'and_', b'add', b'sub'}:
1640 elif op in {b'and_', b'add', b'sub'}:
1634 assert len(tree) == 3
1641 assert len(tree) == 3
1635 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1642 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1636 elif op == b'group':
1643 elif op == b'group':
1637 return walk(tree[1])
1644 return walk(tree[1])
1638 elif op == b'ancestors':
1645 elif op == b'ancestors':
1639 return getstack(walk(tree[1]))
1646 return getstack(walk(tree[1]))
1640 else:
1647 else:
1641 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1648 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1642
1649
1643 return [prefetched[r] for r in walk(tree)]
1650 return [prefetched[r] for r in walk(tree)]
1644
1651
1645
1652
1646 def getdescfromdrev(drev):
1653 def getdescfromdrev(drev):
1647 """get description (commit message) from "Differential Revision"
1654 """get description (commit message) from "Differential Revision"
1648
1655
1649 This is similar to differential.getcommitmessage API. But we only care
1656 This is similar to differential.getcommitmessage API. But we only care
1650 about limited fields: title, summary, test plan, and URL.
1657 about limited fields: title, summary, test plan, and URL.
1651 """
1658 """
1652 title = drev[b'title']
1659 title = drev[b'title']
1653 summary = drev[b'summary'].rstrip()
1660 summary = drev[b'summary'].rstrip()
1654 testplan = drev[b'testPlan'].rstrip()
1661 testplan = drev[b'testPlan'].rstrip()
1655 if testplan:
1662 if testplan:
1656 testplan = b'Test Plan:\n%s' % testplan
1663 testplan = b'Test Plan:\n%s' % testplan
1657 uri = b'Differential Revision: %s' % drev[b'uri']
1664 uri = b'Differential Revision: %s' % drev[b'uri']
1658 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1665 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1659
1666
1660
1667
1661 def get_amended_desc(drev, ctx, folded):
1668 def get_amended_desc(drev, ctx, folded):
1662 """similar to ``getdescfromdrev``, but supports a folded series of commits
1669 """similar to ``getdescfromdrev``, but supports a folded series of commits
1663
1670
1664 This is used when determining if an individual commit needs to have its
1671 This is used when determining if an individual commit needs to have its
1665 message amended after posting it for review. The determination is made for
1672 message amended after posting it for review. The determination is made for
1666 each individual commit, even when they were folded into one review.
1673 each individual commit, even when they were folded into one review.
1667 """
1674 """
1668 if not folded:
1675 if not folded:
1669 return getdescfromdrev(drev)
1676 return getdescfromdrev(drev)
1670
1677
1671 uri = b'Differential Revision: %s' % drev[b'uri']
1678 uri = b'Differential Revision: %s' % drev[b'uri']
1672
1679
1673 # Since the commit messages were combined when posting multiple commits
1680 # Since the commit messages were combined when posting multiple commits
1674 # with --fold, the fields can't be read from Phabricator here, or *all*
1681 # with --fold, the fields can't be read from Phabricator here, or *all*
1675 # affected local revisions will end up with the same commit message after
1682 # affected local revisions will end up with the same commit message after
1676 # the URI is amended in. Append in the DREV line, or update it if it
1683 # the URI is amended in. Append in the DREV line, or update it if it
1677 # exists. At worst, this means commit message or test plan updates on
1684 # exists. At worst, this means commit message or test plan updates on
1678 # Phabricator aren't propagated back to the repository, but that seems
1685 # Phabricator aren't propagated back to the repository, but that seems
1679 # reasonable for the case where local commits are effectively combined
1686 # reasonable for the case where local commits are effectively combined
1680 # in Phabricator.
1687 # in Phabricator.
1681 m = _differentialrevisiondescre.search(ctx.description())
1688 m = _differentialrevisiondescre.search(ctx.description())
1682 if not m:
1689 if not m:
1683 return b'\n\n'.join([ctx.description(), uri])
1690 return b'\n\n'.join([ctx.description(), uri])
1684
1691
1685 return _differentialrevisiondescre.sub(uri, ctx.description())
1692 return _differentialrevisiondescre.sub(uri, ctx.description())
1686
1693
1687
1694
1688 def getlocalcommits(diff):
1695 def getlocalcommits(diff):
1689 """get the set of local commits from a diff object
1696 """get the set of local commits from a diff object
1690
1697
1691 See ``getdiffmeta()`` for an example diff object.
1698 See ``getdiffmeta()`` for an example diff object.
1692 """
1699 """
1693 props = diff.get(b'properties') or {}
1700 props = diff.get(b'properties') or {}
1694 commits = props.get(b'local:commits') or {}
1701 commits = props.get(b'local:commits') or {}
1695 if len(commits) > 1:
1702 if len(commits) > 1:
1696 return {bin(c) for c in commits.keys()}
1703 return {bin(c) for c in commits.keys()}
1697
1704
1698 # Storing the diff metadata predates storing `local:commits`, so continue
1705 # Storing the diff metadata predates storing `local:commits`, so continue
1699 # to use that in the --no-fold case.
1706 # to use that in the --no-fold case.
1700 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1707 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1701
1708
1702
1709
1703 def getdiffmeta(diff):
1710 def getdiffmeta(diff):
1704 """get commit metadata (date, node, user, p1) from a diff object
1711 """get commit metadata (date, node, user, p1) from a diff object
1705
1712
1706 The metadata could be "hg:meta", sent by phabsend, like:
1713 The metadata could be "hg:meta", sent by phabsend, like:
1707
1714
1708 "properties": {
1715 "properties": {
1709 "hg:meta": {
1716 "hg:meta": {
1710 "branch": "default",
1717 "branch": "default",
1711 "date": "1499571514 25200",
1718 "date": "1499571514 25200",
1712 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1719 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1713 "user": "Foo Bar <foo@example.com>",
1720 "user": "Foo Bar <foo@example.com>",
1714 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1721 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1715 }
1722 }
1716 }
1723 }
1717
1724
1718 Or converted from "local:commits", sent by "arc", like:
1725 Or converted from "local:commits", sent by "arc", like:
1719
1726
1720 "properties": {
1727 "properties": {
1721 "local:commits": {
1728 "local:commits": {
1722 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1729 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1723 "author": "Foo Bar",
1730 "author": "Foo Bar",
1724 "authorEmail": "foo@example.com"
1731 "authorEmail": "foo@example.com"
1725 "branch": "default",
1732 "branch": "default",
1726 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1733 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1727 "local": "1000",
1734 "local": "1000",
1728 "message": "...",
1735 "message": "...",
1729 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1736 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1730 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1737 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1731 "summary": "...",
1738 "summary": "...",
1732 "tag": "",
1739 "tag": "",
1733 "time": 1499546314,
1740 "time": 1499546314,
1734 }
1741 }
1735 }
1742 }
1736 }
1743 }
1737
1744
1738 Note: metadata extracted from "local:commits" will lose time zone
1745 Note: metadata extracted from "local:commits" will lose time zone
1739 information.
1746 information.
1740 """
1747 """
1741 props = diff.get(b'properties') or {}
1748 props = diff.get(b'properties') or {}
1742 meta = props.get(b'hg:meta')
1749 meta = props.get(b'hg:meta')
1743 if not meta:
1750 if not meta:
1744 if props.get(b'local:commits'):
1751 if props.get(b'local:commits'):
1745 commit = sorted(props[b'local:commits'].values())[0]
1752 commit = sorted(props[b'local:commits'].values())[0]
1746 meta = {}
1753 meta = {}
1747 if b'author' in commit and b'authorEmail' in commit:
1754 if b'author' in commit and b'authorEmail' in commit:
1748 meta[b'user'] = b'%s <%s>' % (
1755 meta[b'user'] = b'%s <%s>' % (
1749 commit[b'author'],
1756 commit[b'author'],
1750 commit[b'authorEmail'],
1757 commit[b'authorEmail'],
1751 )
1758 )
1752 if b'time' in commit:
1759 if b'time' in commit:
1753 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1760 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1754 if b'branch' in commit:
1761 if b'branch' in commit:
1755 meta[b'branch'] = commit[b'branch']
1762 meta[b'branch'] = commit[b'branch']
1756 node = commit.get(b'commit', commit.get(b'rev'))
1763 node = commit.get(b'commit', commit.get(b'rev'))
1757 if node:
1764 if node:
1758 meta[b'node'] = node
1765 meta[b'node'] = node
1759 if len(commit.get(b'parents', ())) >= 1:
1766 if len(commit.get(b'parents', ())) >= 1:
1760 meta[b'parent'] = commit[b'parents'][0]
1767 meta[b'parent'] = commit[b'parents'][0]
1761 else:
1768 else:
1762 meta = {}
1769 meta = {}
1763 if b'date' not in meta and b'dateCreated' in diff:
1770 if b'date' not in meta and b'dateCreated' in diff:
1764 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1771 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1765 if b'branch' not in meta and diff.get(b'branch'):
1772 if b'branch' not in meta and diff.get(b'branch'):
1766 meta[b'branch'] = diff[b'branch']
1773 meta[b'branch'] = diff[b'branch']
1767 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1774 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1768 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1775 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1769 return meta
1776 return meta
1770
1777
1771
1778
1772 def _getdrevs(ui, stack, specs):
1779 def _getdrevs(ui, stack, specs):
1773 """convert user supplied DREVSPECs into "Differential Revision" dicts
1780 """convert user supplied DREVSPECs into "Differential Revision" dicts
1774
1781
1775 See ``hg help phabread`` for how to specify each DREVSPEC.
1782 See ``hg help phabread`` for how to specify each DREVSPEC.
1776 """
1783 """
1777 if len(specs) > 0:
1784 if len(specs) > 0:
1778
1785
1779 def _formatspec(s):
1786 def _formatspec(s):
1780 if stack:
1787 if stack:
1781 s = b':(%s)' % s
1788 s = b':(%s)' % s
1782 return b'(%s)' % s
1789 return b'(%s)' % s
1783
1790
1784 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1791 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1785
1792
1786 drevs = querydrev(ui, spec)
1793 drevs = querydrev(ui, spec)
1787 if drevs:
1794 if drevs:
1788 return drevs
1795 return drevs
1789
1796
1790 raise error.Abort(_(b"empty DREVSPEC set"))
1797 raise error.Abort(_(b"empty DREVSPEC set"))
1791
1798
1792
1799
1793 def readpatch(ui, drevs, write):
1800 def readpatch(ui, drevs, write):
1794 """generate plain-text patch readable by 'hg import'
1801 """generate plain-text patch readable by 'hg import'
1795
1802
1796 write takes a list of (DREV, bytes), where DREV is the differential number
1803 write takes a list of (DREV, bytes), where DREV is the differential number
1797 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1804 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1798 to be imported. drevs is what "querydrev" returns, results of
1805 to be imported. drevs is what "querydrev" returns, results of
1799 "differential.query".
1806 "differential.query".
1800 """
1807 """
1801 # Prefetch hg:meta property for all diffs
1808 # Prefetch hg:meta property for all diffs
1802 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1809 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1803 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1810 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1804
1811
1805 patches = []
1812 patches = []
1806
1813
1807 # Generate patch for each drev
1814 # Generate patch for each drev
1808 for drev in drevs:
1815 for drev in drevs:
1809 ui.note(_(b'reading D%s\n') % drev[b'id'])
1816 ui.note(_(b'reading D%s\n') % drev[b'id'])
1810
1817
1811 diffid = max(int(v) for v in drev[b'diffs'])
1818 diffid = max(int(v) for v in drev[b'diffs'])
1812 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1819 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1813 desc = getdescfromdrev(drev)
1820 desc = getdescfromdrev(drev)
1814 header = b'# HG changeset patch\n'
1821 header = b'# HG changeset patch\n'
1815
1822
1816 # Try to preserve metadata from hg:meta property. Write hg patch
1823 # Try to preserve metadata from hg:meta property. Write hg patch
1817 # headers that can be read by the "import" command. See patchheadermap
1824 # headers that can be read by the "import" command. See patchheadermap
1818 # and extract in mercurial/patch.py for supported headers.
1825 # and extract in mercurial/patch.py for supported headers.
1819 meta = getdiffmeta(diffs[b'%d' % diffid])
1826 meta = getdiffmeta(diffs[b'%d' % diffid])
1820 for k in _metanamemap.keys():
1827 for k in _metanamemap.keys():
1821 if k in meta:
1828 if k in meta:
1822 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1829 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1823
1830
1824 content = b'%s%s\n%s' % (header, desc, body)
1831 content = b'%s%s\n%s' % (header, desc, body)
1825 patches.append((drev[b'id'], content))
1832 patches.append((drev[b'id'], content))
1826
1833
1827 # Write patches to the supplied callback
1834 # Write patches to the supplied callback
1828 write(patches)
1835 write(patches)
1829
1836
1830
1837
1831 @vcrcommand(
1838 @vcrcommand(
1832 b'phabread',
1839 b'phabread',
1833 [(b'', b'stack', False, _(b'read dependencies'))],
1840 [(b'', b'stack', False, _(b'read dependencies'))],
1834 _(b'DREVSPEC... [OPTIONS]'),
1841 _(b'DREVSPEC... [OPTIONS]'),
1835 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1842 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1836 optionalrepo=True,
1843 optionalrepo=True,
1837 )
1844 )
1838 def phabread(ui, repo, *specs, **opts):
1845 def phabread(ui, repo, *specs, **opts):
1839 """print patches from Phabricator suitable for importing
1846 """print patches from Phabricator suitable for importing
1840
1847
1841 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1848 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1842 the number ``123``. It could also have common operators like ``+``, ``-``,
1849 the number ``123``. It could also have common operators like ``+``, ``-``,
1843 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1850 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1844 select a stack. If multiple DREVSPEC values are given, the result is the
1851 select a stack. If multiple DREVSPEC values are given, the result is the
1845 union of each individually evaluated value. No attempt is currently made
1852 union of each individually evaluated value. No attempt is currently made
1846 to reorder the values to run from parent to child.
1853 to reorder the values to run from parent to child.
1847
1854
1848 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1855 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1849 could be used to filter patches by status. For performance reason, they
1856 could be used to filter patches by status. For performance reason, they
1850 only represent a subset of non-status selections and cannot be used alone.
1857 only represent a subset of non-status selections and cannot be used alone.
1851
1858
1852 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1859 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
1853 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1860 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1854 stack up to D9.
1861 stack up to D9.
1855
1862
1856 If --stack is given, follow dependencies information and read all patches.
1863 If --stack is given, follow dependencies information and read all patches.
1857 It is equivalent to the ``:`` operator.
1864 It is equivalent to the ``:`` operator.
1858 """
1865 """
1859 opts = pycompat.byteskwargs(opts)
1866 opts = pycompat.byteskwargs(opts)
1860 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1867 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1861
1868
1862 def _write(patches):
1869 def _write(patches):
1863 for drev, content in patches:
1870 for drev, content in patches:
1864 ui.write(content)
1871 ui.write(content)
1865
1872
1866 readpatch(ui, drevs, _write)
1873 readpatch(ui, drevs, _write)
1867
1874
1868
1875
1869 @vcrcommand(
1876 @vcrcommand(
1870 b'phabimport',
1877 b'phabimport',
1871 [(b'', b'stack', False, _(b'import dependencies as well'))],
1878 [(b'', b'stack', False, _(b'import dependencies as well'))],
1872 _(b'DREVSPEC... [OPTIONS]'),
1879 _(b'DREVSPEC... [OPTIONS]'),
1873 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1880 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1874 )
1881 )
1875 def phabimport(ui, repo, *specs, **opts):
1882 def phabimport(ui, repo, *specs, **opts):
1876 """import patches from Phabricator for the specified Differential Revisions
1883 """import patches from Phabricator for the specified Differential Revisions
1877
1884
1878 The patches are read and applied starting at the parent of the working
1885 The patches are read and applied starting at the parent of the working
1879 directory.
1886 directory.
1880
1887
1881 See ``hg help phabread`` for how to specify DREVSPEC.
1888 See ``hg help phabread`` for how to specify DREVSPEC.
1882 """
1889 """
1883 opts = pycompat.byteskwargs(opts)
1890 opts = pycompat.byteskwargs(opts)
1884
1891
1885 # --bypass avoids losing exec and symlink bits when importing on Windows,
1892 # --bypass avoids losing exec and symlink bits when importing on Windows,
1886 # and allows importing with a dirty wdir. It also aborts instead of leaving
1893 # and allows importing with a dirty wdir. It also aborts instead of leaving
1887 # rejects.
1894 # rejects.
1888 opts[b'bypass'] = True
1895 opts[b'bypass'] = True
1889
1896
1890 # Mandatory default values, synced with commands.import
1897 # Mandatory default values, synced with commands.import
1891 opts[b'strip'] = 1
1898 opts[b'strip'] = 1
1892 opts[b'prefix'] = b''
1899 opts[b'prefix'] = b''
1893 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1900 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1894 opts[b'obsolete'] = False
1901 opts[b'obsolete'] = False
1895
1902
1896 if ui.configbool(b'phabimport', b'secret'):
1903 if ui.configbool(b'phabimport', b'secret'):
1897 opts[b'secret'] = True
1904 opts[b'secret'] = True
1898 if ui.configbool(b'phabimport', b'obsolete'):
1905 if ui.configbool(b'phabimport', b'obsolete'):
1899 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
1906 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
1900
1907
1901 def _write(patches):
1908 def _write(patches):
1902 parents = repo[None].parents()
1909 parents = repo[None].parents()
1903
1910
1904 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1911 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1905 for drev, contents in patches:
1912 for drev, contents in patches:
1906 ui.status(_(b'applying patch from D%s\n') % drev)
1913 ui.status(_(b'applying patch from D%s\n') % drev)
1907
1914
1908 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1915 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1909 msg, node, rej = cmdutil.tryimportone(
1916 msg, node, rej = cmdutil.tryimportone(
1910 ui,
1917 ui,
1911 repo,
1918 repo,
1912 patchdata,
1919 patchdata,
1913 parents,
1920 parents,
1914 opts,
1921 opts,
1915 [],
1922 [],
1916 None, # Never update wdir to another revision
1923 None, # Never update wdir to another revision
1917 )
1924 )
1918
1925
1919 if not node:
1926 if not node:
1920 raise error.Abort(_(b'D%s: no diffs found') % drev)
1927 raise error.Abort(_(b'D%s: no diffs found') % drev)
1921
1928
1922 ui.note(msg + b'\n')
1929 ui.note(msg + b'\n')
1923 parents = [repo[node]]
1930 parents = [repo[node]]
1924
1931
1925 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1932 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1926
1933
1927 readpatch(repo.ui, drevs, _write)
1934 readpatch(repo.ui, drevs, _write)
1928
1935
1929
1936
1930 @vcrcommand(
1937 @vcrcommand(
1931 b'phabupdate',
1938 b'phabupdate',
1932 [
1939 [
1933 (b'', b'accept', False, _(b'accept revisions')),
1940 (b'', b'accept', False, _(b'accept revisions')),
1934 (b'', b'reject', False, _(b'reject revisions')),
1941 (b'', b'reject', False, _(b'reject revisions')),
1935 (b'', b'abandon', False, _(b'abandon revisions')),
1942 (b'', b'abandon', False, _(b'abandon revisions')),
1936 (b'', b'reclaim', False, _(b'reclaim revisions')),
1943 (b'', b'reclaim', False, _(b'reclaim revisions')),
1937 (b'm', b'comment', b'', _(b'comment on the last revision')),
1944 (b'm', b'comment', b'', _(b'comment on the last revision')),
1938 ],
1945 ],
1939 _(b'DREVSPEC... [OPTIONS]'),
1946 _(b'DREVSPEC... [OPTIONS]'),
1940 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1947 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1941 optionalrepo=True,
1948 optionalrepo=True,
1942 )
1949 )
1943 def phabupdate(ui, repo, *specs, **opts):
1950 def phabupdate(ui, repo, *specs, **opts):
1944 """update Differential Revision in batch
1951 """update Differential Revision in batch
1945
1952
1946 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1953 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1947 """
1954 """
1948 opts = pycompat.byteskwargs(opts)
1955 opts = pycompat.byteskwargs(opts)
1949 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1956 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1950 if len(flags) > 1:
1957 if len(flags) > 1:
1951 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1958 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1952
1959
1953 actions = []
1960 actions = []
1954 for f in flags:
1961 for f in flags:
1955 actions.append({b'type': f, b'value': True})
1962 actions.append({b'type': f, b'value': True})
1956
1963
1957 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1964 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
1958 for i, drev in enumerate(drevs):
1965 for i, drev in enumerate(drevs):
1959 if i + 1 == len(drevs) and opts.get(b'comment'):
1966 if i + 1 == len(drevs) and opts.get(b'comment'):
1960 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1967 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1961 if actions:
1968 if actions:
1962 params = {
1969 params = {
1963 b'objectIdentifier': drev[b'phid'],
1970 b'objectIdentifier': drev[b'phid'],
1964 b'transactions': actions,
1971 b'transactions': actions,
1965 }
1972 }
1966 callconduit(ui, b'differential.revision.edit', params)
1973 callconduit(ui, b'differential.revision.edit', params)
1967
1974
1968
1975
1969 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1976 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1970 def template_review(context, mapping):
1977 def template_review(context, mapping):
1971 """:phabreview: Object describing the review for this changeset.
1978 """:phabreview: Object describing the review for this changeset.
1972 Has attributes `url` and `id`.
1979 Has attributes `url` and `id`.
1973 """
1980 """
1974 ctx = context.resource(mapping, b'ctx')
1981 ctx = context.resource(mapping, b'ctx')
1975 m = _differentialrevisiondescre.search(ctx.description())
1982 m = _differentialrevisiondescre.search(ctx.description())
1976 if m:
1983 if m:
1977 return templateutil.hybriddict(
1984 return templateutil.hybriddict(
1978 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1985 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1979 )
1986 )
1980 else:
1987 else:
1981 tags = ctx.repo().nodetags(ctx.node())
1988 tags = ctx.repo().nodetags(ctx.node())
1982 for t in tags:
1989 for t in tags:
1983 if _differentialrevisiontagre.match(t):
1990 if _differentialrevisiontagre.match(t):
1984 url = ctx.repo().ui.config(b'phabricator', b'url')
1991 url = ctx.repo().ui.config(b'phabricator', b'url')
1985 if not url.endswith(b'/'):
1992 if not url.endswith(b'/'):
1986 url += b'/'
1993 url += b'/'
1987 url += t
1994 url += t
1988
1995
1989 return templateutil.hybriddict({b'url': url, b'id': t,})
1996 return templateutil.hybriddict({b'url': url, b'id': t,})
1990 return None
1997 return None
1991
1998
1992
1999
1993 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2000 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1994 def template_status(context, mapping):
2001 def template_status(context, mapping):
1995 """:phabstatus: String. Status of Phabricator differential.
2002 """:phabstatus: String. Status of Phabricator differential.
1996 """
2003 """
1997 ctx = context.resource(mapping, b'ctx')
2004 ctx = context.resource(mapping, b'ctx')
1998 repo = context.resource(mapping, b'repo')
2005 repo = context.resource(mapping, b'repo')
1999 ui = context.resource(mapping, b'ui')
2006 ui = context.resource(mapping, b'ui')
2000
2007
2001 rev = ctx.rev()
2008 rev = ctx.rev()
2002 try:
2009 try:
2003 drevid = getdrevmap(repo, [rev])[rev]
2010 drevid = getdrevmap(repo, [rev])[rev]
2004 except KeyError:
2011 except KeyError:
2005 return None
2012 return None
2006 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2013 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2007 for drev in drevs:
2014 for drev in drevs:
2008 if int(drev[b'id']) == drevid:
2015 if int(drev[b'id']) == drevid:
2009 return templateutil.hybriddict(
2016 return templateutil.hybriddict(
2010 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2017 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2011 )
2018 )
2012 return None
2019 return None
2013
2020
2014
2021
@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differiential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)

    # Partition local revisions: those mapped to a Differential id, and
    # those with no known association (drevid is None).
    unknownrevs = []
    drevids = set()
    revsbydrevid = {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is None:
            unknownrevs.append(rev)
        else:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)

    # Fetch every Differential in a single conduit round trip, then index
    # the results back by local revision number for fast lookup below.
    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        # Per-changeset hook called by the graph displayer: emit the
        # Differential URI followed by its colorized status name.
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    # Drop revisions with no Differential before walking the graph.
    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
General Comments 0
You need to be logged in to leave comments. Login now